Libraries

library(dplyr)        # data manipulation verbs (filter, mutate, joins)
library(tidyr)        # data reshaping
library(ggplot2)      # plotting
library(lme4)         # (generalized) linear mixed models
library(lmerTest)     # Satterthwaite p-values for lmer models
library(flextable)    # formatted tables for the report
library(rio)          # import() for the lab sheet CSV
library(ggmosaic)     # mosaic plots
library(kableExtra)   # additional table formatting
library(maps)         # map data for lab-location figures
library(countrycode)  # country name/code conversion
library(treemapify)   # treemap plots
library(MOTE)         # effect-size calculations
library(broom)        # tidy model output
options(scipen = 10)  # discourage scientific notation in printed results

Import Data

# Each .Rdata file stores its fitted-model list under the same name,
# `output`. Loading them one after another into the global environment and
# renaming (as the original code did) repeats the same three-line pattern
# four times and leaves a stray `output` object that must be rm()'d.
# Instead, load each file into a private environment and return the
# `output` object directly.
load_output <- function(path) {
  env <- new.env(parent = emptyenv())
  load(path, envir = env)
  env$output
}

linear_exclude <- load_output("linear_exclude.Rdata")   # linear models, updated exclusions
linear_no      <- load_output("linear_noexclude.Rdata") # linear models, pre-registered exclusions
log_exclude    <- load_output("log_exclude.Rdata")      # logistic models, updated exclusions
log_no         <- load_output("log_noexclude.Rdata")    # logistic models, pre-registered exclusions

# Load the analysis datasets used for figures and follow-up summaries.
# NOTE(review): load() restores whatever object names were saved in each
# .Rdata file — presumably the long-format data referenced as `final_long`
# in the model output below; confirm against the files themselves.
load("final_long_log_exclude.Rdata")
load("final_luck_log_exclude.Rdata")
# Lab-level information with updated UN region coding (rio::import).
lab_sheet <- import("lab_sheet_un_region_update.csv")

load("full_long_log_exclude.Rdata")

Modeling Comparison

In this section, we examine two questions:

  • The impact of the change in exclusion criteria: we included all CREP teams that were at least approved, and we excluded participants who reported an age over 100, as those values are unlikely to be accurate.
  • The impact of dichotomization: data screening results indicated that the model was not linear, and we dichotomized the data.
  • We ran all four combinations of these two variables to examine the results.

Data Screening

  • In order to explain why we made the decision to split the data, we examined data screening on Model 6 with all random intercepts, covariates, and the main effect of condition. Each of these is provided below. Click view data screening to view the outputs.
  • As shown in the data screening, the linear model does not meet the assumptions of normality, linearity, or homoscedasticity. Given that the previous study was binary and a visual inspection of the data showed a u-shaped distribution, we chose to dichotomize the data. The logistic regression data screening shows that the assumptions of additivity and linearity of the logit for continuous predictors were met.

Linear, Pre-registered Exclusions

View data screening
# additivity: correlation matrix of the fixed-effect estimates; values far
# from 0 (approaching +/-1) would indicate multicollinearity among predictors
round(cov2cor(vcov(linear_no[[5]])),2)
## 7 x 7 Matrix of class "dpoMatrix"
##               (Intercept) compYes   age gender2male education condIgnorance
## (Intercept)          1.00   -0.10 -0.08       -0.03     -0.26         -0.06
## compYes             -0.10    1.00  0.15        0.00     -0.06          0.00
## age                 -0.08    0.15  1.00        0.00     -0.10          0.00
## gender2male         -0.03    0.00  0.00        1.00     -0.01          0.00
## education           -0.26   -0.06 -0.10       -0.01      1.00          0.00
## condIgnorance       -0.06    0.00  0.00        0.00      0.00          1.00
## condKnowledge       -0.06    0.00  0.00        0.00      0.00          0.50
##               condKnowledge
## (Intercept)           -0.06
## compYes                0.00
## age                    0.00
## gender2male            0.00
## education              0.00
## condIgnorance          0.50
## condKnowledge          1.00
# normality: histogram of Pearson (standardized) residuals should be
# roughly symmetric and centered on zero
standardized <- residuals(linear_no[[5]], type = "pearson")
hist(standardized) 

# linearity: Q-Q plot of the standardized residuals; points should fall
# along the 45-degree reference line if the assumption holds
{qqnorm(standardized)
  abline(0,1)}

# homosc: homoscedasticity check — standardized residuals vs. scaled fitted
# values should show an even spread around zero with no funnel shape
fitted <- scale(fitted.values(linear_no[[5]]))
{plot(fitted, standardized)
  abline(v = 0)
  abline(h = 0)}

Linear, New Exclusions

View data screening
# additivity: correlation matrix of the fixed-effect estimates for the
# updated-exclusions linear model; large off-diagonal values would indicate
# multicollinearity among predictors
round(cov2cor(vcov(linear_exclude[[5]])),2)
## 7 x 7 Matrix of class "dpoMatrix"
##               (Intercept) compYes   age gender2male education condIgnorance
## (Intercept)          1.00   -0.10 -0.11       -0.02     -0.21         -0.05
## compYes             -0.10    1.00  0.17        0.00     -0.04          0.00
## age                 -0.11    0.17  1.00       -0.03     -0.11          0.00
## gender2male         -0.02    0.00 -0.03        1.00      0.01          0.00
## education           -0.21   -0.04 -0.11        0.01      1.00          0.00
## condIgnorance       -0.05    0.00  0.00        0.00      0.00          1.00
## condKnowledge       -0.05    0.00  0.00        0.00      0.00          0.50
##               condKnowledge
## (Intercept)           -0.05
## compYes                0.00
## age                    0.00
## gender2male            0.00
## education              0.00
## condIgnorance          0.50
## condKnowledge          1.00
# normality: histogram of Pearson (standardized) residuals should be
# roughly symmetric and centered on zero
standardized <- residuals(linear_exclude[[5]], type = "pearson")
hist(standardized) 

# linearity: Q-Q plot of the standardized residuals; points should fall
# along the 45-degree reference line if the assumption holds
{qqnorm(standardized)
  abline(0,1)}

# homosc: homoscedasticity check — standardized residuals vs. scaled fitted
# values should show an even spread around zero with no funnel shape
fitted <- scale(fitted.values(linear_exclude[[5]]))
{plot(fitted, standardized)
  abline(v = 0)
  abline(h = 0)}

Logistic, Pre-registered Exclusions

View data screening
# additivity: correlation matrix of the fixed-effect estimates for the
# pre-registered-exclusions logistic model; large off-diagonal values would
# indicate multicollinearity among predictors
round(cov2cor(vcov(log_no[[6]])),2)
## 7 x 7 Matrix of class "dpoMatrix"
##               (Intercept) compYes   age gender2male education condIgnorance
## (Intercept)          1.00   -0.06 -0.09       -0.03     -0.28         -0.05
## compYes             -0.06    1.00  0.17       -0.01     -0.16          0.00
## age                 -0.09    0.17  1.00       -0.07     -0.11          0.00
## gender2male         -0.03   -0.01 -0.07        1.00      0.01          0.00
## education           -0.28   -0.16 -0.11        0.01      1.00          0.00
## condIgnorance       -0.05    0.00  0.00        0.00      0.00          1.00
## condKnowledge       -0.06    0.00  0.00        0.00     -0.01          0.42
##               condKnowledge
## (Intercept)           -0.06
## compYes                0.00
## age                    0.00
## gender2male            0.00
## education             -0.01
## condIgnorance          0.42
## condKnowledge          1.00
# linearity of the logit: for a logistic model, each continuous predictor
# should be linearly related to the log-odds of the outcome
df <- log_no[[6]]@frame
# predict() on the link scale returns the logit directly; this is
# algebraically identical to log(p / (1 - p)) but avoids producing +/-Inf
# when fitted probabilities approach 0 or 1
df$logit <- predict(log_no[[6]], type = "link")

ggplot(df, aes(logit, age)) + 
  geom_point() + 
  theme_classic() + 
  geom_smooth(method = "lm") + 
  facet_wrap(~vignette*cond)

ggplot(df, aes(logit, education)) + 
  geom_point() + 
  theme_classic() + 
  geom_smooth(method = "lm") + 
  facet_wrap(~vignette*cond)

Logistic, New Exclusions (Paper)

View data screening
# additivity: correlation matrix of the fixed-effect estimates for the
# updated-exclusions logistic model; large off-diagonal values would
# indicate multicollinearity among predictors
round(cov2cor(vcov(log_exclude[[6]])),2)
## 7 x 7 Matrix of class "dpoMatrix"
##               (Intercept) compYes   age gender2male education condIgnorance
## (Intercept)          1.00   -0.08 -0.11       -0.03     -0.24         -0.05
## compYes             -0.08    1.00  0.20        0.01     -0.12          0.00
## age                 -0.11    0.20  1.00       -0.09     -0.11         -0.01
## gender2male         -0.03    0.01 -0.09        1.00      0.03          0.01
## education           -0.24   -0.12 -0.11        0.03      1.00          0.01
## condIgnorance       -0.05    0.00 -0.01        0.01      0.01          1.00
## condKnowledge       -0.05    0.00  0.00       -0.01     -0.01          0.43
##               condKnowledge
## (Intercept)           -0.05
## compYes                0.00
## age                    0.00
## gender2male           -0.01
## education             -0.01
## condIgnorance          0.43
## condKnowledge          1.00
# linearity of the logit: for a logistic model, each continuous predictor
# should be linearly related to the log-odds of the outcome
df <- log_exclude[[6]]@frame
# predict() on the link scale returns the logit directly; this is
# algebraically identical to log(p / (1 - p)) but avoids producing +/-Inf
# when fitted probabilities approach 0 or 1
df$logit <- predict(log_exclude[[6]], type = "link")

ggplot(df, aes(logit, age)) + 
  geom_point() + 
  theme_classic() + 
  geom_smooth(method = "lm") + 
  facet_wrap(~vignette*cond)

ggplot(df, aes(logit, education)) + 
  geom_point() + 
  theme_classic() + 
  geom_smooth(method = "lm") + 
  facet_wrap(~vignette*cond)

Overall Model Comparison

  1. Intercept only model DV ~ 1
  2. Intercept model with random intercept vignette
  3. Intercept model with random intercept vignette/participant id (the linear version of this model would not converge, so it is not reported for the linear models)
  4. Intercept model with random intercept vignette/id/lab, for the linear model no participant id was used
  5. Random intercepts and covariates
  6. Random intercepts, covariates, and condition 6a) Random intercepts, covariates, and condition by vignette 6b) Random intercepts, covariates, and condition by turk
  • These results are discussed below within each dependent variable; however, the general summary is the same for each:
    • Covariate models are all better (model 5 AIC < model 4 AIC)
    • Condition adds to the model (model 6)
    • The condition by vignette interaction adds to the model (model 6a)
    • The condition by Turk interaction adds to the model (model 6b)
  • Therefore, all model decisions would be the same given different versions of exclusions or linking functions.
# Pull the AIC from every fitted model in each list. vapply() (rather than
# unlist(lapply(...))) guarantees a plain numeric vector of one value per
# model and errors loudly if any element is malformed.
linear_e_AIC <- vapply(linear_exclude, AIC, numeric(1))
linear_n_AIC <- vapply(linear_no, AIC, numeric(1))
log_e_AIC <- vapply(log_exclude, AIC, numeric(1))
log_n_AIC <- vapply(log_no, AIC, numeric(1))

# Build the model labels programmatically so the logistic table (8 models
# per DV) and the linear table (7 per DV; model 3 would not converge and is
# omitted) cannot drift out of sync with hand-typed name vectors.
dv_names <- c("know", "reason", "luck")
log_steps <- c("1", "2", "3", "4", "5", "6", "6a", "6b")
linear_steps <- log_steps[log_steps != "3"]

log_aics <- data.frame(
  "model" = paste0(rep(dv_names, each = length(log_steps)), log_steps),
  "log_e_AIC" = log_e_AIC, 
  "log_n_AIC" = log_n_AIC
)

linear_aics <- data.frame(
  "model" = paste0(rep(dv_names, each = length(linear_steps)), linear_steps),
  "linear_e_AIC" = linear_e_AIC, 
  "linear_n_AIC" = linear_n_AIC
)

# Join on the model label; the linear columns are NA for the model-3 rows,
# which only exist in the logistic table.
aics <- log_aics %>% 
  full_join(linear_aics, 
            by = "model")

flextable(aics)

model

log_e_AIC

log_n_AIC

linear_e_AIC

linear_n_AIC

know1

18,881.094

11,990.258

128,678.2

82,273.01

know2

17,834.749

11,256.231

127,638.0

81,529.14

know3

17,836.749

11,258.231

know4

17,838.749

11,260.231

127,543.6

81,505.15

know5

17,554.309

11,170.385

125,503.8

80,874.04

know6

15,871.991

9,897.613

123,723.2

79,529.84

know6a

15,807.693

9,846.246

123,506.7

79,347.38

know6b

15,850.157

9,878.269

123,698.2

79,509.77

reason1

7,343.350

4,865.062

113,885.9

73,111.19

reason2

7,286.555

4,813.866

113,827.2

73,066.22

reason3

7,288.556

4,815.867

reason4

7,290.555

4,817.866

113,374.9

72,633.60

reason5

7,144.102

4,729.710

111,561.1

72,026.42

reason6

7,047.129

4,680.178

111,470.1

71,963.03

reason6a

7,025.805

4,667.614

111,453.4

71,951.05

reason6b

7,017.367

4,642.464

111,461.7

71,952.78

luck1

17,776.672

11,254.190

119,504.8

76,302.78

luck2

16,771.304

10,693.416

118,585.8

75,763.41

luck3

16,773.304

10,695.416

luck4

16,775.304

10,697.416

118,342.0

75,595.76

luck5

16,489.596

10,569.598

116,511.5

75,028.70

luck6

15,896.168

10,154.577

115,917.9

74,610.41

luck6a

15,458.374

9,914.341

115,465.3

74,339.25

luck6b

7,017.367 [NOTE(review): this value is identical to reason6b's log_e_AIC and far below the neighboring luck models (~15,400–15,900); it appears to be a copy-paste error — verify against the log_e_AIC output]

10,139.967

115,905.9

74,600.93

Knowledge

Condition

  • Complete output can be seen below.
  • In logistic versus linear: we find the same pattern of results for the condition variable - we cannot compare these directly because they are on different scales, however, they provide the same direction and significance decisions.
  • In exclusions: both provide the same results within the same confidence intervals, directions, and significance decisions.
View full results
# log updated exclusions: Model 6 (random intercepts + covariates +
# condition) from the logistic list fit with the new exclusion criteria
summary(log_exclude[[6]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: know_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond
##    Data: final_long
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  15872.0  15947.4  -7926.0  15852.0    13885 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -1.7728 -0.6220 -0.4902  0.7726  3.8696 
## 
## Random effects:
##  Groups                    Name        Variance Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.0000   0.0000  
##  id:vignette               (Intercept) 0.0000   0.0000  
##  vignette                  (Intercept) 0.4474   0.6689  
## Number of obs: 13895, groups:  
## person_code:(id:vignette), 13895; id:vignette, 13895; vignette, 3
## 
## Fixed effects:
##                Estimate Std. Error z value Pr(>|z|)    
## (Intercept)   -0.183810   0.404093  -0.455   0.6492    
## compYes        0.019364   0.044633   0.434   0.6644    
## age            0.003733   0.001997   1.870   0.0615 .  
## gender2male   -0.084539   0.042604  -1.984   0.0472 *  
## education     -0.017069   0.007528  -2.267   0.0234 *  
## condIgnorance -1.313222   0.050238 -26.140   <2e-16 ***
## condKnowledge  0.611665   0.044435  13.765   <2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn
## compYes     -0.075                                   
## age         -0.107  0.197                            
## gender2male -0.030  0.014 -0.090                     
## education   -0.235 -0.118 -0.115  0.035              
## condIgnornc -0.049  0.001 -0.007  0.011  0.008       
## condKnowldg -0.054  0.001  0.002 -0.006 -0.005  0.431
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# log pre-registered exclusions: Model 6 (random intercepts + covariates +
# condition) from the logistic list fit with the pre-registered exclusions
summary(log_no[[6]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: know_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond
##    Data: final_long
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##   9897.6   9968.6  -4938.8   9877.6     8903 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -2.0827 -0.5849 -0.4672  0.7226  4.2283 
## 
## Random effects:
##  Groups                    Name        Variance Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.0000   0.0000  
##  id:vignette               (Intercept) 0.0000   0.0000  
##  vignette                  (Intercept) 0.5193   0.7206  
## Number of obs: 8913, groups:  
## person_code:(id:vignette), 8913; id:vignette, 8913; vignette, 3
## 
## Fixed effects:
##                Estimate Std. Error z value Pr(>|z|)    
## (Intercept)   -0.288294   0.441765  -0.653    0.514    
## compYes        0.076833   0.055215   1.392    0.164    
## age            0.002106   0.001976   1.066    0.286    
## gender2male   -0.018580   0.052580  -0.353    0.724    
## education     -0.016070   0.010021  -1.604    0.109    
## condIgnorance -1.383171   0.064406 -21.476   <2e-16 ***
## condKnowledge  0.760795   0.056327  13.507   <2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn
## compYes     -0.058                                   
## age         -0.093  0.175                            
## gender2male -0.033 -0.011 -0.072                     
## education   -0.283 -0.163 -0.114  0.014              
## condIgnornc -0.055  0.000  0.003  0.004  0.003       
## condKnowldg -0.062  0.003  0.004 -0.005 -0.007  0.418
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# linear updated exclusions: Model 6 equivalent — element [[5]] because the
# linear lists omit the non-converging model 3, shifting the indices by one
summary(linear_exclude[[5]])
## Linear mixed model fit by REML. t-tests use Satterthwaite's method [
## lmerModLmerTest]
## Formula: know_vas ~ (1 | vignette/person_code) + comp + age + gender2 +  
##     education + cond
##    Data: final_long
## 
## REML criterion at convergence: 123703.2
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -2.2524 -0.7884 -0.1638  0.8945  2.7775 
## 
## Random effects:
##  Groups               Name        Variance Std.Dev.
##  person_code:vignette (Intercept)   34.03   5.833  
##  vignette             (Intercept)  181.98  13.490  
##  Residual                         1323.26  36.377  
## Number of obs: 12325, groups:  person_code:vignette, 108; vignette, 3
## 
## Fixed effects:
##                  Estimate  Std. Error          df t value Pr(>|t|)    
## (Intercept)      49.44543     8.12911     2.33777   6.083   0.0176 *  
## compYes          -0.16200     1.01608  1218.64845  -0.159   0.8734    
## age              -0.04391     0.03910  6215.65145  -1.123   0.2615    
## gender2male      -0.15620     0.74241 12246.61532  -0.210   0.8334    
## education        -0.36543     0.13450 10237.22452  -2.717   0.0066 ** 
## condIgnorance   -22.38826     0.80320 12225.36039 -27.874   <2e-16 ***
## condKnowledge    12.29759     0.80344 12228.00760  15.306   <2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn
## compYes     -0.102                                   
## age         -0.106  0.167                            
## gender2male -0.024 -0.001 -0.026                     
## education   -0.215 -0.036 -0.110  0.005              
## condIgnornc -0.049  0.000  0.000  0.000  0.000       
## condKnowldg -0.049  0.000  0.000  0.000  0.000  0.500
# linear pre-registered exclusions: Model 6 equivalent — element [[5]]
# because the linear lists omit the non-converging model 3
summary(linear_no[[5]])
## Linear mixed model fit by REML. t-tests use Satterthwaite's method [
## lmerModLmerTest]
## Formula: know_vas ~ (1 | vignette/person_code) + comp + age + gender2 +  
##     education + cond
##    Data: final_long
## 
## REML criterion at convergence: 79509.8
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -2.3121 -0.7838 -0.1363  0.8679  2.7901 
## 
## Random effects:
##  Groups               Name        Variance Std.Dev.
##  person_code:vignette (Intercept)   20.86   4.567  
##  vignette             (Intercept)  204.98  14.317  
##  Residual                         1264.08  35.554  
## Number of obs: 7961, groups:  person_code:vignette, 54; vignette, 3
## 
## Fixed effects:
##                 Estimate Std. Error         df t value Pr(>|t|)    
## (Intercept)     48.32923    8.72076    2.43335   5.542   0.0197 *  
## compYes          1.10405    1.16784  606.85509   0.945   0.3448    
## age             -0.03521    0.03315 4522.77186  -1.062   0.2883    
## gender2male     -0.32119    0.88876 7653.99771  -0.361   0.7178    
## education       -0.31052    0.17252 4395.96422  -1.800   0.0719 .  
## condIgnorance  -22.91682    0.97681 7912.12480 -23.461   <2e-16 ***
## condKnowledge   14.12770    0.97686 7913.61103  14.462   <2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn
## compYes     -0.096                                   
## age         -0.085  0.146                            
## gender2male -0.028 -0.001 -0.002                     
## education   -0.258 -0.056 -0.101 -0.007              
## condIgnornc -0.056  0.000  0.000  0.000  0.000       
## condKnowldg -0.056  0.001  0.000  0.000  0.000  0.500

Vignette

  • Logistic versus linear: Both show significant interactions between condition and vignette.
  • Exclusions: Both show significant interactions between condition and vignette.
  • The exact pattern of these interactions is different within predictors but generally has the same pattern of results.
View full results
# log updated exclusions: Model 6a (condition by vignette interaction),
# element [[7]] of the logistic list
summary(log_exclude[[7]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: know_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond * vignette
##    Data: final_long
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  15807.7  15928.3  -7887.8  15775.7    13879 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -1.7804 -0.5717 -0.4484  0.7819  3.1287 
## 
## Random effects:
##  Groups                    Name        Variance           Std.Dev.    
##  person_code:(id:vignette) (Intercept) 0.0000000000172891 0.0000041580
##  id:vignette               (Intercept) 0.0000000000007192 0.0000008481
##  vignette                  (Intercept) 0.0000000000000000 0.0000000000
## Number of obs: 13895, groups:  
## person_code:(id:vignette), 13895; id:vignette, 13895; vignette, 3
## 
## Fixed effects:
##                               Estimate Std. Error z value          Pr(>|z|)    
## (Intercept)                   0.555020   0.126409   4.391 0.000011300772713 ***
## compYes                       0.016780   0.044844   0.374          0.708268    
## age                           0.003778   0.002004   1.886          0.059360 .  
## gender2male                  -0.087674   0.042794  -2.049          0.040489 *  
## education                    -0.016600   0.007561  -2.196          0.028122 *  
## condIgnorance                -1.604292   0.080023 -20.048           < 2e-16 ***
## condKnowledge                 0.503172   0.076198   6.603 0.000000000040161 ***
## vignetteEmma                 -1.930434   0.084439 -22.862           < 2e-16 ***
## vignetteGerald               -0.390320   0.073064  -5.342 0.000000091833949 ***
## condIgnorance:vignetteEmma    0.977499   0.132791   7.361 0.000000000000182 ***
## condKnowledge:vignetteEmma    0.395026   0.114528   3.449          0.000562 ***
## condIgnorance:vignetteGerald  0.223192   0.113776   1.962          0.049800 *  
## condKnowledge:vignetteGerald  0.021780   0.106168   0.205          0.837455    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# log pre-registered exclusions: Model 6a (condition by vignette
# interaction), element [[7]] of the logistic list
summary(log_no[[7]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: know_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond * vignette
##    Data: final_long
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##   9846.2   9959.8  -4907.1   9814.2     8897 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -2.1557 -0.5279 -0.4235  0.7544  3.2541 
## 
## Random effects:
##  Groups                    Name        Variance            Std.Dev.    
##  person_code:(id:vignette) (Intercept) 0.00000000000000000 0.0000000000
##  id:vignette               (Intercept) 0.00000000000003909 0.0000001977
##  vignette                  (Intercept) 0.00000000000000000 0.0000000000
## Number of obs: 8913, groups:  
## person_code:(id:vignette), 8913; id:vignette, 8913; vignette, 3
## 
## Fixed effects:
##                               Estimate Std. Error z value      Pr(>|z|)    
## (Intercept)                   0.448179   0.157312   2.849      0.004386 ** 
## compYes                       0.071208   0.055565   1.282      0.200009    
## age                           0.001945   0.001971   0.987      0.323752    
## gender2male                  -0.017390   0.052887  -0.329      0.742293    
## education                    -0.015451   0.010070  -1.534      0.124966    
## condIgnorance                -1.650724   0.101805 -16.215       < 2e-16 ***
## condKnowledge                 0.800838   0.097344   8.227       < 2e-16 ***
## vignetteEmma                 -1.997144   0.108600 -18.390       < 2e-16 ***
## vignetteGerald               -0.324747   0.090511  -3.588      0.000333 ***
## condIgnorance:vignetteEmma    1.048154   0.171915   6.097 0.00000000108 ***
## condKnowledge:vignetteEmma    0.218717   0.147265   1.485      0.137494    
## condIgnorance:vignetteGerald  0.165680   0.144717   1.145      0.252269    
## condKnowledge:vignetteGerald -0.216100   0.134482  -1.607      0.108075    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# linear updated exclusions: Model 6a (condition by vignette interaction) —
# element [[6]] because the linear lists omit the non-converging model 3
summary(linear_exclude[[6]])
## Linear mixed model fit by REML. t-tests use Satterthwaite's method [
## lmerModLmerTest]
## Formula: know_vas ~ (1 | vignette/person_code) + comp + age + gender2 +  
##     education + cond * vignette
##    Data: final_long
## 
## REML criterion at convergence: 123474.7
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -2.3300 -0.6937 -0.2529  0.8658  2.5602 
## 
## Random effects:
##  Groups               Name        Variance Std.Dev.
##  person_code:vignette (Intercept)   34.9    5.907  
##  vignette             (Intercept)  112.3   10.596  
##  Residual                         1302.1   36.084  
## Number of obs: 12325, groups:  person_code:vignette, 108; vignette, 3
## 
## Fixed effects:
##                                      Estimate       Std. Error               df
## (Intercept)                     63.6491120328    10.9056667857     0.0000003184
## compYes                         -0.1585251266     1.0118597217  1262.5701557380
## age                             -0.0471365522     0.0388419350  6404.4544013999
## gender2male                     -0.3060476336     0.7367621855 12251.4458417057
## education                       -0.3309000164     0.1335614390 10325.0479322773
## condIgnorance                  -33.0611724830     1.3871976459 12222.8735524039
## condKnowledge                    9.6693642569     1.3734366746 12223.2342014353
## vignetteEmma                   -36.1750949383    15.1201244986     0.0000002942
## vignetteGerald                  -7.6123993344    15.1198092726     0.0000002941
## condIgnorance:vignetteEmma      25.5616991268     1.9565836922 12225.4808225861
## condKnowledge:vignetteEmma       6.3093059824     1.9483982067 12227.4572082872
## condIgnorance:vignetteGerald     6.3550643077     1.9527921309 12219.9613719969
## condKnowledge:vignetteGerald     1.5564721118     1.9516682843 12220.5235542453
##                              t value         Pr(>|t|)    
## (Intercept)                    5.836          1.00000    
## compYes                       -0.157          0.87553    
## age                           -1.214          0.22497    
## gender2male                   -0.415          0.67786    
## education                     -2.478          0.01325 *  
## condIgnorance                -23.833          < 2e-16 ***
## condKnowledge                  7.040 0.00000000000202 ***
## vignetteEmma                  -2.393          1.00000    
## vignetteGerald                -0.503          1.00000    
## condIgnorance:vignetteEmma    13.064          < 2e-16 ***
## condKnowledge:vignetteEmma     3.238          0.00121 ** 
## condIgnorance:vignetteGerald   3.254          0.00114 ** 
## condKnowledge:vignetteGerald   0.798          0.42517    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
# linear pre-registered exclusions: Model 6a (condition by vignette
# interaction) — element [[6]] in the linear list (model 3 omitted)
summary(linear_no[[6]])
## Linear mixed model fit by REML. t-tests use Satterthwaite's method [
## lmerModLmerTest]
## Formula: know_vas ~ (1 | vignette/person_code) + comp + age + gender2 +  
##     education + cond * vignette
##    Data: final_long
## 
## REML criterion at convergence: 79315.4
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -2.4492 -0.6723 -0.2367  0.8142  2.5545 
## 
## Random effects:
##  Groups               Name        Variance Std.Dev.
##  person_code:vignette (Intercept)   22.42   4.734  
##  vignette             (Intercept)  168.55  12.983  
##  Residual                         1238.46  35.192  
## Number of obs: 7961, groups:  person_code:vignette, 54; vignette, 3
## 
## Fixed effects:
##                                Estimate Std. Error         df t value
## (Intercept)                    62.62863   13.34194 7831.79354   4.694
## compYes                         0.86673    1.16506  657.59378   0.744
## age                            -0.04477    0.03290 4792.01211  -1.361
## gender2male                    -0.52381    0.88077 7697.21219  -0.595
## education                      -0.24715    0.17125 4577.13556  -1.443
## condIgnorance                 -33.31477    1.68387 7907.13101 -19.785
## condKnowledge                  14.22666    1.66097 7908.92749   8.565
## vignetteEmma                  -37.32864   18.51654 7905.01302  -2.016
## vignetteGerald                 -7.01338   18.51535 7905.70317  -0.379
## condIgnorance:vignetteEmma     26.39881    2.37794 7912.72612  11.102
## condKnowledge:vignetteEmma      2.99716    2.36189 7913.98767   1.269
## condIgnorance:vignetteGerald    4.68784    2.36824 7906.36535   1.979
## condKnowledge:vignetteGerald   -3.20824    2.36675 7906.17704  -1.356
##                                Pr(>|t|)    
## (Intercept)                  0.00000272 ***
## compYes                          0.4572    
## age                              0.1736    
## gender2male                      0.5520    
## education                        0.1490    
## condIgnorance                   < 2e-16 ***
## condKnowledge                   < 2e-16 ***
## vignetteEmma                     0.0438 *  
## vignetteGerald                   0.7049    
## condIgnorance:vignetteEmma      < 2e-16 ***
## condKnowledge:vignetteEmma       0.2045    
## condIgnorance:vignetteGerald     0.0478 *  
## condKnowledge:vignetteGerald     0.1753    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (nloptwrap) convergence code: 0 (OK)
## unable to evaluate scaled gradient
## Model failed to converge: degenerate  Hessian with 1 negative eigenvalues

Turk

  • Logistic versus linear: In this case, logistic models do not show an interaction between the Turk sample and the condition. Linear models show an interaction.
  • Exclusions: exclusions do not appear to change the pattern of results within each linking function.
  • The effect for linear models is graphed below. This result occurs because there is slightly more sensitivity to show the differences between Gettier and Knowledge in Turk than the other sample. A visualization of the distributions shows that more people pick zero in the regular sample than Turk.
View full result
# log updated exclusions: Model 6b (condition by Turk-sample interaction),
# element [[8]] of the logistic list
summary(log_exclude[[8]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: know_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond * turk
##    Data: final_long
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  15850.2  15948.2  -7912.1  15824.2    13882 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -2.1117 -0.6244 -0.4852  0.7822  3.9470 
## 
## Random effects:
##  Groups                    Name        Variance           Std.Dev.    
##  person_code:(id:vignette) (Intercept) 0.0000000000002108 0.0000004591
##  id:vignette               (Intercept) 0.0000000000024070 0.0000015515
##  vignette                  (Intercept) 0.4494139896495767 0.6703834646
## Number of obs: 13895, groups:  
## person_code:(id:vignette), 13895; id:vignette, 13895; vignette, 3
## 
## Fixed effects:
##                          Estimate Std. Error z value Pr(>|z|)    
## (Intercept)            -0.0743861  0.4058531  -0.183  0.85458    
## compYes                -0.0330383  0.0463320  -0.713  0.47580    
## age                    -0.0001138  0.0022169  -0.051  0.95906    
## gender2male            -0.1113136  0.0431663  -2.579  0.00992 ** 
## education              -0.0164575  0.0075305  -2.185  0.02886 *  
## condIgnorance          -1.2894721  0.0521660 -24.719  < 2e-16 ***
## condKnowledge           0.5914056  0.0461318  12.820  < 2e-16 ***
## turkTRUE                0.3226120  0.1260957   2.558  0.01051 *  
## condIgnorance:turkTRUE -0.3324922  0.1930846  -1.722  0.08507 .  
## condKnowledge:turkTRUE  0.2986885  0.1735316   1.721  0.08521 .  
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn cndKnw trTRUE cI:TRU
## compYes     -0.090                                                        
## age         -0.124  0.288                                                 
## gender2male -0.039  0.053 -0.021                                          
## education   -0.233 -0.119 -0.112  0.032                                   
## condIgnornc -0.052  0.006  0.001  0.011  0.007                            
## condKnowldg -0.055 -0.001 -0.002 -0.006 -0.005  0.433                     
## turkTRUE     0.031 -0.182 -0.282 -0.098  0.011  0.158  0.184              
## cndIgn:TRUE  0.013 -0.001  0.001  0.007  0.001 -0.268 -0.119 -0.588       
## cndKnw:TRUE  0.014  0.000  0.002 -0.001  0.002 -0.118 -0.263 -0.654  0.427
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# Logistic mixed model (Model 8), pre-registered exclusions:
# dichotomized knowledge rating ~ covariates + cond * turk
summary(log_no[[8]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: know_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond * turk
##    Data: final_long
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##   9878.3   9970.5  -4926.1   9852.3     8900 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -2.1719 -0.5885 -0.4616  0.7383  4.3004 
## 
## Random effects:
##  Groups                    Name        Variance          Std.Dev.   
##  person_code:(id:vignette) (Intercept) 0.000000000000000 0.000000000
##  id:vignette               (Intercept) 0.000000000002026 0.000001423
##  vignette                  (Intercept) 0.523661794694040 0.723644799
## Number of obs: 8913, groups:  
## person_code:(id:vignette), 8913; id:vignette, 8913; vignette, 3
## 
## Fixed effects:
##                         Estimate Std. Error z value Pr(>|z|)    
## (Intercept)            -0.161654   0.444460  -0.364  0.71608    
## compYes                -0.005201   0.058036  -0.090  0.92859    
## age                    -0.001795   0.002162  -0.830  0.40649    
## gender2male            -0.059996   0.053492  -1.122  0.26204    
## education              -0.016217   0.010029  -1.617  0.10587    
## condIgnorance          -1.353173   0.068607 -19.723  < 2e-16 ***
## condKnowledge           0.744792   0.059787  12.457  < 2e-16 ***
## turkTRUE                0.408205   0.130505   3.128  0.00176 ** 
## condIgnorance:turkTRUE -0.280348   0.198922  -1.409  0.15874    
## condKnowledge:turkTRUE  0.160700   0.178869   0.898  0.36896    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn cndKnw trTRUE cI:TRU
## compYes     -0.074                                                        
## age         -0.110  0.276                                                 
## gender2male -0.043  0.042  0.001                                          
## education   -0.282 -0.153 -0.103  0.015                                   
## condIgnornc -0.060  0.009  0.013  0.005  0.002                            
## condKnowldg -0.064 -0.001 -0.003 -0.006 -0.008  0.421                     
## turkTRUE     0.025 -0.209 -0.270 -0.116 -0.005  0.190  0.229              
## cndIgn:TRUE  0.019 -0.002  0.000  0.007  0.002 -0.342 -0.149 -0.586       
## cndKnw:TRUE  0.021  0.000  0.001  0.001  0.003 -0.144 -0.329 -0.652  0.427
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# Linear mixed model (Model 7), updated exclusions:
# continuous knowledge rating ~ covariates + cond * turk
summary(linear_exclude[[7]])
## Linear mixed model fit by REML. t-tests use Satterthwaite's method [
## lmerModLmerTest]
## Formula: know_vas ~ (1 | vignette/person_code) + comp + age + gender2 +  
##     education + cond * turk
##    Data: final_long
## 
## REML criterion at convergence: 123672.2
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -2.2367 -0.7893 -0.1609  0.8931  2.7698 
## 
## Random effects:
##  Groups               Name        Variance Std.Dev.
##  person_code:vignette (Intercept)   32.43   5.695  
##  vignette             (Intercept)  182.13  13.496  
##  Residual                         1321.89  36.358  
## Number of obs: 12325, groups:  person_code:vignette, 108; vignette, 3
## 
## Fixed effects:
##                           Estimate  Std. Error          df t value Pr(>|t|)    
## (Intercept)               49.52458     8.13216     2.33994   6.090  0.01755 *  
## compYes                   -0.41102     1.01706  1255.78125  -0.404  0.68619    
## age                       -0.05609     0.03961  8939.44523  -1.416  0.15680    
## gender2male               -0.23396     0.74291 12292.84701  -0.315  0.75283    
## education                 -0.36273     0.13433 10055.14842  -2.700  0.00694 ** 
## condIgnorance            -21.71751     0.83998 12224.24000 -25.855  < 2e-16 ***
## condKnowledge             12.03232     0.84033 12227.71322  14.319  < 2e-16 ***
## turkTRUE                   8.62820     3.96644    86.66971   2.175  0.03233 *  
## condIgnorance:turkTRUE    -7.76061     2.85394 12210.81748  -2.719  0.00655 ** 
## condKnowledge:turkTRUE     3.05294     2.85177 12205.16782   1.071  0.28440    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn cndKnw trTRUE cI:TRU
## compYes     -0.103                                                        
## age         -0.106  0.184                                                 
## gender2male -0.025  0.005 -0.017                                          
## education   -0.214 -0.037 -0.110  0.005                                   
## condIgnornc -0.052  0.000  0.000  0.000  0.000                            
## condKnowldg -0.052  0.000  0.000  0.000  0.000  0.500                     
## turkTRUE     0.005 -0.099 -0.157 -0.048  0.008  0.106  0.106              
## cndIgn:TRUE  0.015  0.000  0.000  0.000  0.000 -0.294 -0.147 -0.360       
## cndKnw:TRUE  0.015  0.000  0.000  0.000  0.000 -0.147 -0.295 -0.359  0.500
# Linear mixed model (Model 7), pre-registered exclusions:
# continuous knowledge rating ~ covariates + cond * turk
summary(linear_no[[7]])
## Linear mixed model fit by REML. t-tests use Satterthwaite's method [
## lmerModLmerTest]
## Formula: know_vas ~ (1 | vignette/person_code) + comp + age + gender2 +  
##     education + cond * turk
##    Data: final_long
## 
## REML criterion at convergence: 79483.8
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -2.2831 -0.7844 -0.1323  0.8648  2.7717 
## 
## Random effects:
##  Groups               Name        Variance Std.Dev.
##  person_code:vignette (Intercept)   18.48   4.299  
##  vignette             (Intercept)  205.62  14.339  
##  Residual                         1262.84  35.536  
## Number of obs: 7961, groups:  person_code:vignette, 54; vignette, 3
## 
## Fixed effects:
##                          Estimate Std. Error         df t value Pr(>|t|)    
## (Intercept)              48.21268    8.72957    2.43048   5.523  0.01991 *  
## compYes                   0.76590    1.16822  608.31613   0.656  0.51232    
## age                      -0.04488    0.03357 6812.15964  -1.337  0.18128    
## gender2male              -0.44964    0.89100 7881.38786  -0.505  0.61382    
## education                -0.31490    0.17195 3941.88302  -1.831  0.06713 .  
## condIgnorance           -21.90260    1.04918 7910.63850 -20.876  < 2e-16 ***
## condKnowledge            13.97736    1.04938 7913.05007  13.320  < 2e-16 ***
## turkTRUE                  7.77227    3.33795   43.43117   2.328  0.02461 *  
## condIgnorance:turkTRUE   -7.57655    2.86480 7901.25946  -2.645  0.00819 ** 
## condKnowledge:turkTRUE    1.10771    2.86284 7895.37692   0.387  0.69882    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn cndKnw trTRUE cI:TRU
## compYes     -0.095                                                        
## age         -0.084  0.170                                                 
## gender2male -0.028  0.012  0.012                                          
## education   -0.257 -0.057 -0.100 -0.007                                   
## condIgnornc -0.060  0.001  0.000  0.000  0.000                            
## condKnowldg -0.060  0.001  0.000  0.000  0.000  0.500                     
## turkTRUE    -0.005 -0.128 -0.152 -0.076 -0.001  0.157  0.157              
## cndIgn:TRUE  0.022  0.000  0.000  0.000  0.000 -0.366 -0.183 -0.429       
## cndKnw:TRUE  0.022  0.000  0.000  0.000  0.000 -0.183 -0.367 -0.429  0.500
# Pull the model frame (the exact data used to fit Model 7, updated
# exclusions) so the plots reflect post-exclusion observations only.
df <- linear_exclude[[7]]@frame

# Bar chart of mean knowledge rating by Turk sample and condition,
# with normal-approximation 95% CI error bars.
ggplot(df, aes(turk, know_vas, fill = cond)) +
  stat_summary(geom = "bar",
               fun = mean,
               position = "dodge") +
  stat_summary(geom = "errorbar",
               fun.data = mean_cl_normal,
               width = .2,
               position = position_dodge(width = 0.90)) +
  scale_fill_discrete(name = "Condition") +
  xlab("Turk Sample") +
  ylab("Knowledge Rating") +
  theme_classic()

# Violin plot showing the full distribution of knowledge ratings
# within each Turk-sample group.
ggplot(df, aes(turk, know_vas)) +
  geom_violin() +
  xlab("Turk Sample") +
  ylab("Knowledge Rating") +
  theme_classic()

Reasonable

Condition

  • Logistic versus linear: The pattern, direction, and magnitude of results is the same.
  • Exclusions: The pattern, direction, and magnitude of results is the same.
View full results
# Logistic mixed model (Model 14), updated exclusions:
# dichotomized reasonableness rating ~ covariates + main effect of cond
summary(log_exclude[[14]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: reason_vas_combined ~ (1 | vignette/id/person_code) + comp +  
##     age + gender2 + education + cond
##    Data: final_long
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##   7047.1   7122.6  -3513.6   7027.1    13964 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -6.6491  0.2093  0.2544  0.3073  0.5823 
## 
## Random effects:
##  Groups                    Name        Variance Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.001461 0.03822 
##  id:vignette               (Intercept) 0.007296 0.08542 
##  vignette                  (Intercept) 0.076656 0.27687 
## Number of obs: 13974, groups:  
## person_code:(id:vignette), 13974; id:vignette, 13974; vignette, 3
## 
## Fixed effects:
##                Estimate Std. Error z value          Pr(>|z|)    
## (Intercept)    1.839819   0.248928   7.391 0.000000000000146 ***
## compYes        0.251565   0.072504   3.470          0.000521 ***
## age           -0.004968   0.003209  -1.548          0.121581    
## gender2male   -0.181153   0.070632  -2.565          0.010326 *  
## education      0.057713   0.011926   4.839 0.000001303215506 ***
## condIgnorance -0.400271   0.075940  -5.271 0.000000135772713 ***
## condKnowledge  0.425613   0.090671   4.694 0.000002678579960 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn
## compYes     -0.202                                   
## age         -0.297  0.201                            
## gender2male -0.085  0.014 -0.095                     
## education   -0.599 -0.098 -0.097  0.030              
## condIgnornc -0.178 -0.005  0.002  0.002  0.001       
## condKnowldg -0.154  0.003  0.003 -0.002  0.007  0.487
# Logistic mixed model (Model 14), pre-registered exclusions:
# dichotomized reasonableness rating ~ covariates + main effect of cond
summary(log_no[[14]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: reason_vas_combined ~ (1 | vignette/id/person_code) + comp +  
##     age + gender2 + education + cond
##    Data: final_long
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##   4680.2   4751.2  -2330.1   4660.2     8963 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -7.0840  0.2131  0.2619  0.3188  0.7567 
## 
## Random effects:
##  Groups                    Name        Variance      Std.Dev. 
##  person_code:(id:vignette) (Intercept) 0.00065586043 0.0256098
##  id:vignette               (Intercept) 0.00000001395 0.0001181
##  vignette                  (Intercept) 0.11065854035 0.3326538
## Number of obs: 8973, groups:  
## person_code:(id:vignette), 8973; id:vignette, 8973; vignette, 3
## 
## Fixed effects:
##                Estimate Std. Error z value     Pr(>|z|)    
## (Intercept)    1.554670   0.291704   5.330 0.0000000984 ***
## compYes        0.121551   0.087029   1.397     0.162510    
## age           -0.006759   0.002591  -2.608     0.009097 ** 
## gender2male   -0.153204   0.084696  -1.809     0.070471 .  
## education      0.084209   0.014909   5.648 0.0000000162 ***
## condIgnorance -0.308554   0.092775  -3.326     0.000882 ***
## condKnowledge  0.429622   0.108894   3.945 0.0000796927 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn
## compYes     -0.152                                   
## age         -0.184  0.143                            
## gender2male -0.089 -0.004 -0.056                     
## education   -0.627 -0.119 -0.115  0.002              
## condIgnornc -0.180 -0.005  0.000  0.002  0.002       
## condKnowldg -0.158  0.002 -0.001 -0.005  0.009  0.482
# Linear mixed model (Model 12), updated exclusions:
# continuous reasonableness rating ~ covariates + main effect of cond
summary(linear_exclude[[12]])
## Linear mixed model fit by REML. t-tests use Satterthwaite's method [
## lmerModLmerTest]
## Formula: reason_vas ~ (1 | vignette/person_code) + comp + age + gender2 +  
##     education + cond
##    Data: final_long
## 
## REML criterion at convergence: 111450.1
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -4.2604 -0.2205  0.3649  0.5607  2.1477 
## 
## Random effects:
##  Groups               Name        Variance Std.Dev.
##  person_code:vignette (Intercept)  33.403   5.780  
##  vignette             (Intercept)   4.498   2.121  
##  Residual                         484.285  22.006  
## Number of obs: 12331, groups:  person_code:vignette, 108; vignette, 3
## 
## Fixed effects:
##                  Estimate  Std. Error          df t value         Pr(>|t|)    
## (Intercept)      82.54112     1.93942     8.13361  42.560 0.00000000007562 ***
## compYes           0.73805     0.66528  3656.02132   1.109           0.2673    
## age               0.01920     0.02426 10691.79619   0.791           0.4288    
## gender2male      -1.15252     0.45093 12317.81232  -2.556           0.0106 *  
## education         0.17174     0.08245 12002.02033   2.083           0.0373 *  
## condIgnorance    -3.36510     0.48594 12226.71495  -6.925 0.00000000000458 ***
## condKnowledge     1.19634     0.48600 12228.01231   2.462           0.0138 *  
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn
## compYes     -0.277                                   
## age         -0.272  0.145                            
## gender2male -0.062 -0.007 -0.019                     
## education   -0.553 -0.028 -0.111  0.004              
## condIgnornc -0.125  0.000  0.000  0.000  0.000       
## condKnowldg -0.125  0.000  0.000  0.000  0.000  0.500
# Linear mixed model (Model 12), pre-registered exclusions:
# continuous reasonableness rating ~ covariates + main effect of cond
summary(linear_no[[12]])
## Linear mixed model fit by REML. t-tests use Satterthwaite's method [
## lmerModLmerTest]
## Formula: reason_vas ~ (1 | vignette/person_code) + comp + age + gender2 +  
##     education + cond
##    Data: final_long
## 
## REML criterion at convergence: 71943
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -4.2647 -0.2465  0.3656  0.5597  2.2324 
## 
## Random effects:
##  Groups               Name        Variance Std.Dev.
##  person_code:vignette (Intercept)  55.366   7.441  
##  vignette             (Intercept)   5.225   2.286  
##  Residual                         481.008  21.932  
## Number of obs: 7966, groups:  person_code:vignette, 54; vignette, 3
## 
## Fixed effects:
##                 Estimate Std. Error         df t value       Pr(>|t|)    
## (Intercept)     81.25953    2.41740    8.30856  33.614 0.000000000358 ***
## compYes          0.69844    0.81085 4583.74670   0.861         0.3891    
## age             -0.02399    0.02102 7863.88774  -1.141         0.2537    
## gender2male     -1.13277    0.55242 7950.42605  -2.051         0.0403 *  
## education        0.23547    0.10979 7776.50097   2.145         0.0320 *  
## condIgnorance   -3.26289    0.60258 7909.17481  -5.415 0.000000063118 ***
## condKnowledge    1.53964    0.60271 7909.70465   2.555         0.0107 *  
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn
## compYes     -0.246                                   
## age         -0.197  0.114                            
## gender2male -0.064 -0.010  0.013                     
## education   -0.603 -0.033 -0.089 -0.005              
## condIgnornc -0.125  0.000  0.000  0.000  0.000       
## condKnowldg -0.125  0.000  0.000  0.000  0.000  0.500

Vignette

  • Logistic versus linear: Generally, each version showed an interaction of condition and vignette.
  • Exclusions: Logistic models both showed interactions for each exclusion type. However, the linear model with the pre-registered exclusions did not show an interaction between condition and vignette. This model has considerably less power than the updated exclusions, as it only has ~8,000 data points (~2,600+ participants) versus ~12,000+ data points (~4,100+ participants).
View full results
# Logistic mixed model (Model 15), updated exclusions:
# dichotomized reasonableness rating ~ covariates + cond * vignette
summary(log_exclude[[15]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: reason_vas_combined ~ (1 | vignette/id/person_code) + comp +  
##     age + gender2 + education + cond * vignette
##    Data: final_long
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##   7025.8   7146.5  -3496.9   6993.8    13958 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -7.4722  0.2020  0.2562  0.3173  0.5561 
## 
## Random effects:
##  Groups                    Name        Variance Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.000000 0.00000 
##  id:vignette               (Intercept) 0.002124 0.04609 
##  vignette                  (Intercept) 0.000000 0.00000 
## Number of obs: 13974, groups:  
## person_code:(id:vignette), 13974; id:vignette, 13974; vignette, 3
## 
## Fixed effects:
##                               Estimate Std. Error z value        Pr(>|z|)    
## (Intercept)                   2.425110   0.222940  10.878         < 2e-16 ***
## compYes                       0.248903   0.072555   3.431        0.000602 ***
## age                          -0.005006   0.003218  -1.556        0.119764    
## gender2male                  -0.181886   0.070666  -2.574        0.010056 *  
## education                     0.058594   0.011936   4.909 0.0000009160223 ***
## condIgnorance                -0.863705   0.160582  -5.379 0.0000000750731 ***
## condKnowledge                 0.390875   0.204642   1.910        0.056127 .  
## vignetteEmma                 -1.099565   0.155615  -7.066 0.0000000000016 ***
## vignetteGerald               -0.522306   0.168276  -3.104        0.001910 ** 
## condIgnorance:vignetteEmma    0.741195   0.196886   3.765        0.000167 ***
## condKnowledge:vignetteEmma    0.256617   0.245839   1.044        0.296559    
## condIgnorance:vignetteGerald  0.424379   0.210062   2.020        0.043357 *  
## condKnowledge:vignetteGerald -0.237181   0.255828  -0.927        0.353869    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# Logistic mixed model (Model 15), pre-registered exclusions:
# dichotomized reasonableness rating ~ covariates + cond * vignette
summary(log_no[[15]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: reason_vas_combined ~ (1 | vignette/id/person_code) + comp +  
##     age + gender2 + education + cond * vignette
##    Data: final_long
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##   4667.6   4781.2  -2317.8   4635.6     8957 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -8.7634  0.2141  0.2683  0.3201  0.7372 
## 
## Random effects:
##  Groups                    Name        Variance  Std.Dev. 
##  person_code:(id:vignette) (Intercept) 4.000e-14 2.000e-07
##  id:vignette               (Intercept) 4.000e-14 2.000e-07
##  vignette                  (Intercept) 3.388e-47 5.821e-24
## Number of obs: 8973, groups:  
## person_code:(id:vignette), 8973; id:vignette, 8973; vignette, 3
## 
## Fixed effects:
##                               Estimate Std. Error z value Pr(>|z|)    
## (Intercept)                   2.059901   0.255214   8.071 6.96e-16 ***
## compYes                       0.117747   0.087086   1.352  0.17635    
## age                          -0.006854   0.002595  -2.641  0.00826 ** 
## gender2male                  -0.149654   0.084732  -1.766  0.07736 .  
## education                     0.084600   0.014902   5.677 1.37e-08 ***
## condIgnorance                -0.569533   0.193958  -2.936  0.00332 ** 
## condKnowledge                 0.770953   0.263458   2.926  0.00343 ** 
## vignetteEmma                 -0.995789   0.182261  -5.464 4.67e-08 ***
## vignetteGerald               -0.450185   0.196305  -2.293  0.02183 *  
## condIgnorance:vignetteEmma    0.456683   0.238897   1.912  0.05592 .  
## condKnowledge:vignetteEmma   -0.215650   0.309199  -0.697  0.48552    
## condIgnorance:vignetteGerald  0.180623   0.253647   0.712  0.47640    
## condKnowledge:vignetteGerald -0.671010   0.319988  -2.097  0.03600 *  
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# Linear mixed model (Model 13), updated exclusions:
# continuous reasonableness rating ~ covariates + cond * vignette
summary(linear_exclude[[13]])
## Linear mixed model fit by REML. t-tests use Satterthwaite's method [
## lmerModLmerTest]
## Formula: reason_vas ~ (1 | vignette/person_code) + comp + age + gender2 +  
##     education + cond * vignette
##    Data: final_long
## 
## REML criterion at convergence: 111421.4
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -4.2969 -0.2239  0.3612  0.5692  2.1092 
## 
## Random effects:
##  Groups               Name        Variance Std.Dev.
##  person_code:vignette (Intercept)  33.469   5.785  
##  vignette             (Intercept)   9.252   3.042  
##  Residual                         484.005  22.000  
## Number of obs: 12331, groups:  person_code:vignette, 108; vignette, 3
## 
## Fixed effects:
##                                 Estimate  Std. Error          df t value
## (Intercept)                     84.16532     3.51892  6505.36624  23.918
## compYes                          0.73661     0.66519  3664.90648   1.107
## age                              0.01918     0.02426 10694.81320   0.791
## gender2male                     -1.16314     0.45090 12313.79453  -2.580
## education                        0.17602     0.08246 11996.71032   2.135
## condIgnorance                   -4.84758     0.84596 12224.28429  -5.730
## condKnowledge                    1.68797     0.83740 12224.08360   2.016
## vignetteEmma                    -5.03101     4.60179  5888.85314  -1.093
## vignetteGerald                  -0.01318     4.60122  5889.51260  -0.003
## condIgnorance:vignetteEmma       3.04640     1.19323 12225.73295   2.553
## condKnowledge:vignetteEmma      -0.21466     1.18802 12226.32527  -0.181
## condIgnorance:vignetteGerald     1.37575     1.19056 12222.90307   1.156
## condKnowledge:vignetteGerald    -1.28468     1.18963 12223.18341  -1.080
##                                  Pr(>|t|)    
## (Intercept)                       < 2e-16 ***
## compYes                            0.2682    
## age                                0.4292    
## gender2male                        0.0099 ** 
## education                          0.0328 *  
## condIgnorance                0.0000000103 ***
## condKnowledge                      0.0439 *  
## vignetteEmma                       0.2743    
## vignetteGerald                     0.9977    
## condIgnorance:vignetteEmma         0.0107 *  
## condKnowledge:vignetteEmma         0.8566    
## condIgnorance:vignetteGerald       0.2479    
## condKnowledge:vignetteGerald       0.2802    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (nloptwrap) convergence code: 0 (OK)
## Model is nearly unidentifiable: large eigenvalue ratio
##  - Rescale variables?
# Linear mixed model (Model 13), pre-registered exclusions:
# continuous reasonableness rating ~ covariates + cond * vignette
summary(linear_no[[13]])
## Linear mixed model fit by REML. t-tests use Satterthwaite's method [
## lmerModLmerTest]
## Formula: reason_vas ~ (1 | vignette/person_code) + comp + age + gender2 +  
##     education + cond * vignette
##    Data: final_long
## 
## REML criterion at convergence: 71919.1
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -4.2507 -0.2474  0.3620  0.5689  2.2114 
## 
## Random effects:
##  Groups               Name        Variance Std.Dev.
##  person_code:vignette (Intercept)  55.43    7.445  
##  vignette             (Intercept)  11.75    3.428  
##  Residual                         481.03   21.932  
## Number of obs: 7966, groups:  person_code:vignette, 54; vignette, 3
## 
## Fixed effects:
##                                      Estimate       Std. Error               df
## (Intercept)                    83.19236731081    4.27752426252    0.00000006172
## compYes                         0.68268359425    0.81098410528 4587.17902672185
## age                            -0.02408865268    0.02103202742 7859.68291505886
## gender2male                    -1.13072158873    0.55279163395 7946.56926150827
## education                       0.23739661911    0.10989376237 7770.25000475278
## condIgnorance                  -3.55277098209    1.04985498625 7905.58201463511
## condKnowledge                   2.50288080765    1.03530481729 7905.98212640020
## vignetteEmma                   -5.62445886327    5.56692185351    0.00000004426
## vignetteGerald                 -0.23120400095    5.56469801642    0.00000004419
## condIgnorance:vignetteEmma      1.19673534751    1.48284561261 7907.31634547197
## condKnowledge:vignetteEmma     -0.86268044461    1.47265965270 7907.62722968159
## condIgnorance:vignetteGerald   -0.33666307818    1.47566823976 7905.12887209355
## condKnowledge:vignetteGerald   -2.04740546590    1.47497461086 7905.18687022792
##                              t value Pr(>|t|)    
## (Intercept)                   19.449 0.999999    
## compYes                        0.842 0.399946    
## age                           -1.145 0.252107    
## gender2male                   -2.045 0.040841 *  
## education                      2.160 0.030785 *  
## condIgnorance                 -3.384 0.000718 ***
## condKnowledge                  2.418 0.015649 *  
## vignetteEmma                  -1.010 1.000000    
## vignetteGerald                -0.042 1.000000    
## condIgnorance:vignetteEmma     0.807 0.419660    
## condKnowledge:vignetteEmma    -0.586 0.558028    
## condIgnorance:vignetteGerald  -0.228 0.819541    
## condKnowledge:vignetteGerald  -1.388 0.165147    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Turk

  • Logistic versus linear: Generally, this shows the same pattern, direction, and magnitude of results.
  • Exclusions: Given the dichotomization of p-values, the linear model with pre-registered exclusions does show one significant interaction between Turk and condition. However, the estimates have overlapping confidence intervals and are similar in magnitude and direction across models.
View full results
# Logistic mixed model (Model 16), updated exclusions:
# dichotomized reasonableness rating ~ covariates + cond * turk
summary(log_exclude[[16]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: reason_vas_combined ~ (1 | vignette/id/person_code) + comp +  
##     age + gender2 + education + cond * turk
##    Data: final_long
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##   7017.4   7115.5  -3495.7   6991.4    13961 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -8.5047  0.2054  0.2546  0.3087  0.6997 
## 
## Random effects:
##  Groups                    Name        Variance     Std.Dev. 
##  person_code:(id:vignette) (Intercept) 0.0023770476 0.0487550
##  id:vignette               (Intercept) 0.0000001472 0.0003837
##  vignette                  (Intercept) 0.0768855931 0.2772825
## Number of obs: 13974, groups:  
## person_code:(id:vignette), 13974; id:vignette, 13974; vignette, 3
## 
## Fixed effects:
##                         Estimate Std. Error z value   Pr(>|z|)    
## (Intercept)             2.073866   0.251554   8.244    < 2e-16 ***
## compYes                 0.131776   0.075708   1.741   0.081757 .  
## age                    -0.012466   0.003376  -3.692   0.000222 ***
## gender2male            -0.232380   0.071089  -3.269   0.001080 ** 
## education               0.056843   0.011725   4.848 0.00000125 ***
## condIgnorance          -0.378343   0.077451  -4.885 0.00000103 ***
## condKnowledge           0.440103   0.092314   4.767 0.00000187 ***
## turkTRUE                1.385038   0.368873   3.755   0.000173 ***
## condIgnorance:turkTRUE -0.655384   0.427927  -1.532   0.125638    
## condKnowledge:turkTRUE -0.443704   0.514800  -0.862   0.388745    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn cndKnw trTRUE cI:TRU
## compYes     -0.251                                                        
## age         -0.338  0.315                                                 
## gender2male -0.109  0.053 -0.039                                          
## education   -0.583 -0.094 -0.088  0.035                                   
## condIgnornc -0.180 -0.003  0.005  0.004  0.001                            
## condKnowldg -0.154  0.002  0.001 -0.003  0.007  0.484                     
## turkTRUE     0.044 -0.113 -0.156 -0.050 -0.003  0.120  0.101              
## cndIgn:TRUE  0.032  0.001  0.001 -0.002 -0.001 -0.181 -0.088 -0.835       
## cndKnw:TRUE  0.027  0.000  0.001  0.001 -0.002 -0.087 -0.179 -0.694  0.598
# Logistic mixed model (Model 16), pre-registered exclusions:
# dichotomized reasonableness rating ~ covariates + cond * turk
summary(log_no[[16]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: reason_vas_combined ~ (1 | vignette/id/person_code) + comp +  
##     age + gender2 + education + cond * turk
##    Data: final_long
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##   4642.5   4734.8  -2308.2   4616.5     8960 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -9.0531  0.2029  0.2619  0.3207  1.3985 
## 
## Random effects:
##  Groups                    Name        Variance         Std.Dev.   
##  person_code:(id:vignette) (Intercept) 0.00000000001987 0.000004457
##  id:vignette               (Intercept) 0.00727367508328 0.085285843
##  vignette                  (Intercept) 0.11192331507671 0.334549421
## Number of obs: 8973, groups:  
## person_code:(id:vignette), 8973; id:vignette, 8973; vignette, 3
## 
## Fixed effects:
##                         Estimate Std. Error z value      Pr(>|z|)    
## (Intercept)             1.757931   0.291438   6.032 0.00000000162 ***
## compYes                -0.035211   0.089757  -0.392       0.69484    
## age                    -0.011293   0.002441  -4.627 0.00000371868 ***
## gender2male            -0.238548   0.085714  -2.783       0.00538 ** 
## education               0.079720   0.014614   5.455 0.00000004900 ***
## condIgnorance          -0.268770   0.095790  -2.806       0.00502 ** 
## condKnowledge           0.454173   0.111954   4.057 0.00004975103 ***
## turkTRUE                1.515018   0.369668   4.098 0.00004161549 ***
## condIgnorance:turkTRUE -0.760031   0.431834  -1.760       0.07841 .  
## condKnowledge:turkTRUE -0.459598   0.518873  -0.886       0.37575    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn cndKnw trTRUE cI:TRU
## compYes     -0.182                                                        
## age         -0.190  0.216                                                 
## gender2male -0.114  0.049 -0.003                                          
## education   -0.621 -0.104 -0.103  0.010                                   
## condIgnornc -0.185 -0.003  0.003  0.005  0.003                            
## condKnowldg -0.160 -0.002 -0.008 -0.008  0.010  0.476                     
## turkTRUE     0.014 -0.105 -0.108 -0.063 -0.016  0.144  0.124              
## cndIgn:TRUE  0.042  0.001  0.000 -0.003 -0.001 -0.222 -0.105 -0.836       
## cndKnw:TRUE  0.034  0.001  0.003  0.003 -0.003 -0.103 -0.216 -0.696  0.596
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# linear updated exclusions
# Linear mixed model (lmer, Satterthwaite t-tests) of continuous reasonableness:
# reason_vas ~ (1 | vignette/person_code) + comp + age + gender2 + education +
# cond * turk, fit on final_long under the updated exclusion criteria.
summary(linear_exclude[[14]])
## Linear mixed model fit by REML. t-tests use Satterthwaite's method [
## lmerModLmerTest]
## Formula: reason_vas ~ (1 | vignette/person_code) + comp + age + gender2 +  
##     education + cond * turk
##    Data: final_long
## 
## REML criterion at convergence: 111435.7
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -4.2586 -0.2228  0.3620  0.5617  2.1431 
## 
## Random effects:
##  Groups               Name        Variance Std.Dev.
##  person_code:vignette (Intercept)  33.489   5.787  
##  vignette             (Intercept)   4.497   2.121  
##  Residual                         484.215  22.005  
## Number of obs: 12331, groups:  person_code:vignette, 108; vignette, 3
## 
## Fixed effects:
##                           Estimate  Std. Error          df t value
## (Intercept)               82.37440     1.94146     8.16506  42.429
## compYes                    0.69705     0.66691  3828.65021   1.045
## age                        0.01684     0.02441 11602.27835   0.690
## gender2male               -1.16566     0.45115 12306.98980  -2.584
## education                  0.17214     0.08244 11995.51729   2.088
## condIgnorance             -3.07832     0.50842 12225.41625  -6.055
## condKnowledge              1.36964     0.50854 12226.99384   2.693
## turkTRUE                   4.89406     3.63052    97.44898   1.348
## condIgnorance:turkTRUE    -3.30979     1.72740 12217.78311  -1.916
## condKnowledge:turkTRUE    -1.99600     1.72596 12216.04908  -1.156
##                               Pr(>|t|)    
## (Intercept)            0.0000000000722 ***
## compYes                        0.29600    
## age                            0.49030    
## gender2male                    0.00978 ** 
## education                      0.03683 *  
## condIgnorance          0.0000000014486 ***
## condKnowledge                  0.00708 ** 
## turkTRUE                       0.18077    
## condIgnorance:turkTRUE         0.05538 .  
## condKnowledge:turkTRUE         0.24752    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn cndKnw trTRUE cI:TRU
## compYes     -0.275                                                        
## age         -0.269  0.151                                                 
## gender2male -0.061 -0.005 -0.015                                          
## education   -0.552 -0.028 -0.111  0.003                                   
## condIgnornc -0.131  0.000  0.000  0.000  0.000                            
## condKnowldg -0.131  0.000  0.000  0.000  0.000  0.500                     
## turkTRUE    -0.020 -0.066 -0.104 -0.031  0.006  0.070  0.070              
## cndIgn:TRUE  0.039  0.000  0.000  0.000  0.000 -0.294 -0.147 -0.238       
## cndKnw:TRUE  0.039  0.000  0.000  0.000  0.000 -0.147 -0.295 -0.238  0.500
# linear pre-registered exclusions
# Same linear model of reason_vas with cond * turk as above, but fit on the
# smaller pre-registered exclusion sample (7966 obs vs 12331) for comparison.
summary(linear_no[[14]])
## Linear mixed model fit by REML. t-tests use Satterthwaite's method [
## lmerModLmerTest]
## Formula: reason_vas ~ (1 | vignette/person_code) + comp + age + gender2 +  
##     education + cond * turk
##    Data: final_long
## 
## REML criterion at convergence: 71926.8
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -4.2677 -0.2457  0.3587  0.5630  2.2243 
## 
## Random effects:
##  Groups               Name        Variance Std.Dev.
##  person_code:vignette (Intercept)  54.909   7.410  
##  vignette             (Intercept)   5.261   2.294  
##  Residual                         480.864  21.929  
## Number of obs: 7966, groups:  person_code:vignette, 54; vignette, 3
## 
## Fixed effects:
##                          Estimate Std. Error         df t value       Pr(>|t|)
## (Intercept)              80.76648    2.42892    8.44710  33.252 0.000000000297
## compYes                   0.63938    0.81200 4721.98086   0.787        0.43107
## age                      -0.02568    0.02107 7935.30911  -1.219        0.22282
## gender2male              -1.15483    0.55265 7939.35699  -2.090        0.03668
## education                 0.23552    0.10977 7761.41301   2.146        0.03194
## condIgnorance            -2.77856    0.64746 7907.64823  -4.291 0.000017961265
## condKnowledge             1.87603    0.64772 7908.35149   2.896        0.00379
## turkTRUE                  7.28019    4.59949   48.16052   1.583        0.12001
## condIgnorance:turkTRUE   -3.61045    1.76800 7904.28369  -2.042        0.04117
## condKnowledge:turkTRUE   -2.50244    1.76665 7903.69215  -1.416        0.15667
##                           
## (Intercept)            ***
## compYes                   
## age                       
## gender2male            *  
## education              *  
## condIgnorance          ***
## condKnowledge          ** 
## turkTRUE                  
## condIgnorance:turkTRUE *  
## condKnowledge:turkTRUE    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn cndKnw trTRUE cI:TRU
## compYes     -0.240                                                        
## age         -0.190  0.117                                                 
## gender2male -0.061 -0.008  0.015                                          
## education   -0.600 -0.033 -0.089 -0.005                                   
## condIgnornc -0.133  0.000  0.000  0.000  0.000                            
## condKnowldg -0.133  0.000  0.000  0.000  0.000  0.501                     
## turkTRUE    -0.088 -0.059 -0.067 -0.033 -0.001  0.070  0.070              
## cndIgn:TRUE  0.049  0.000  0.000  0.000  0.000 -0.366 -0.183 -0.192       
## cndKnw:TRUE  0.049  0.000  0.000  0.000  0.000 -0.184 -0.367 -0.192  0.500

Luck

Condition

  • Logistic versus linear: both the logistic and linear models show the same pattern of condition effects.
  • Exclusions: models fit with the updated and pre-registered exclusions show the same pattern of condition effects.
View full results
# log updated exclusions
# Logistic mixed model of dichotomized luck ratings with condition main effect only:
# luck_vas_combined ~ (1 | vignette/id/person_code) + comp + age + gender2 +
# education + cond, fit on final_luck under the updated exclusions.
summary(log_exclude[[22]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: luck_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond
##    Data: final_luck
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  15896.2  15970.6  -7938.1  15876.2    12630 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -2.1575 -0.8039 -0.4574  0.9076  2.2438 
## 
## Random effects:
##  Groups                    Name        Variance Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.009841 0.09920 
##  id:vignette               (Intercept) 0.004391 0.06626 
##  vignette                  (Intercept) 0.373469 0.61112 
## Number of obs: 12640, groups:  
## person_code:(id:vignette), 12640; id:vignette, 12640; vignette, 3
## 
## Fixed effects:
##                Estimate Std. Error z value  Pr(>|z|)    
## (Intercept)   -0.246316   0.372248  -0.662    0.5082    
## compYes       -0.180694   0.043976  -4.109 0.0000398 ***
## age            0.004153   0.001967   2.111    0.0348 *  
## gender2male    0.007267   0.042185   0.172    0.8632    
## education     -0.031096   0.007485  -4.155 0.0000326 ***
## condIgnorance  1.027734   0.047414  21.676   < 2e-16 ***
## condKnowledge  0.935679   0.047106  19.863   < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn
## compYes     -0.079                                   
## age         -0.116  0.191                            
## gender2male -0.029  0.014 -0.086                     
## education   -0.253 -0.116 -0.109  0.023              
## condIgnornc -0.064 -0.007  0.018  0.002 -0.014       
## condKnowldg -0.062 -0.012  0.009 -0.001 -0.017  0.525
# log pre-registered exclusions
# Same logistic luck model (condition main effect) fit on the pre-registered
# exclusion sample (8054 obs) to check sensitivity to the exclusion change.
summary(log_no[[22]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: luck_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond
##    Data: final_luck
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  10154.6  10224.5  -5067.3  10134.6     8044 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -2.1437 -0.8239 -0.4450  0.9135  2.3238 
## 
## Random effects:
##  Groups                    Name        Variance Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.007540 0.08683 
##  id:vignette               (Intercept) 0.005717 0.07561 
##  vignette                  (Intercept) 0.336196 0.57982 
## Number of obs: 8054, groups:  
## person_code:(id:vignette), 8054; id:vignette, 8054; vignette, 3
## 
## Fixed effects:
##                Estimate Std. Error z value    Pr(>|z|)    
## (Intercept)   -0.273730   0.365110  -0.750     0.45342    
## compYes       -0.280924   0.053476  -5.253 0.000000149 ***
## age            0.003280   0.001852   1.771     0.07656 .  
## gender2male    0.072560   0.051339   1.413     0.15755    
## education     -0.028566   0.009833  -2.905     0.00367 ** 
## condIgnorance  1.036935   0.059455  17.441     < 2e-16 ***
## condKnowledge  1.030595   0.059157  17.421     < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn
## compYes     -0.062                                   
## age         -0.109  0.165                            
## gender2male -0.035 -0.011 -0.065                     
## education   -0.336 -0.163 -0.104 -0.004              
## condIgnornc -0.082 -0.018  0.018  0.014 -0.012       
## condKnowldg -0.079 -0.022  0.014  0.004 -0.019  0.530
# linear updated exclusions
# Linear mixed model of continuous luck ratings with condition main effect:
# luck_vas ~ (1 | vignette/person_code) + comp + age + gender2 + education + cond,
# fit on final_luck under the updated exclusions.
summary(linear_exclude[[19]])
## Linear mixed model fit by REML. t-tests use Satterthwaite's method [
## lmerModLmerTest]
## Formula: luck_vas ~ (1 | vignette/person_code) + comp + age + gender2 +  
##     education + cond
##    Data: final_luck
## 
## REML criterion at convergence: 115897.9
## 
## Scaled residuals: 
##      Min       1Q   Median       3Q      Max 
## -2.20047 -0.82864 -0.05521  0.88539  2.41224 
## 
## Random effects:
##  Groups               Name        Variance Std.Dev.
##  person_code:vignette (Intercept)   51.54   7.179  
##  vignette             (Intercept)  174.48  13.209  
##  Residual                         1385.09  37.217  
## Number of obs: 11492, groups:  person_code:vignette, 108; vignette, 3
## 
## Fixed effects:
##                  Estimate  Std. Error          df t value Pr(>|t|)    
## (Intercept)      40.54639     8.03158     2.40667   5.048  0.02497 *  
## compYes          -2.72419     1.11071  1652.21090  -2.453  0.01428 *  
## age               0.11542     0.04174  7540.19056   2.765  0.00571 ** 
## gender2male      -0.53176     0.78840 11471.61637  -0.674  0.50002    
## education        -0.24143     0.14382 10385.21417  -1.679  0.09324 .  
## condIgnorance    17.92923     0.85423 11400.80415  20.989  < 2e-16 ***
## condKnowledge    18.57941     0.85105 11402.71373  21.831  < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn
## compYes     -0.113                                   
## age         -0.114  0.158                            
## gender2male -0.025 -0.006 -0.020                     
## education   -0.233 -0.030 -0.111  0.002              
## condIgnornc -0.054  0.002  0.005  0.001  0.001       
## condKnowldg -0.054  0.004  0.001 -0.001 -0.001  0.503
# linear pre-registered exclusions
# Same linear luck model (condition main effect) on the pre-registered
# exclusion sample (7417 obs) for comparison with the updated-exclusion fit.
summary(linear_no[[19]])
## Linear mixed model fit by REML. t-tests use Satterthwaite's method [
## lmerModLmerTest]
## Formula: luck_vas ~ (1 | vignette/person_code) + comp + age + gender2 +  
##     education + cond
##    Data: final_luck
## 
## REML criterion at convergence: 74590.4
## 
## Scaled residuals: 
##      Min       1Q   Median       3Q      Max 
## -2.14158 -0.82815 -0.06471  0.88672  2.44212 
## 
## Random effects:
##  Groups               Name        Variance Std.Dev.
##  person_code:vignette (Intercept)   48.59   6.971  
##  vignette             (Intercept)  151.21  12.297  
##  Residual                         1349.78  36.739  
## Number of obs: 7417, groups:  person_code:vignette, 54; vignette, 3
## 
## Fixed effects:
##                 Estimate Std. Error         df t value Pr(>|t|)    
## (Intercept)     38.04904    7.75370    2.71634   4.907   0.0204 *  
## compYes         -2.89810    1.32303 1321.58037  -2.190   0.0287 *  
## age              0.08402    0.03532 6267.16102   2.379   0.0174 *  
## gender2male     -0.07517    0.95661 7372.66695  -0.079   0.9374    
## education       -0.10216    0.18776 5977.08222  -0.544   0.5864    
## condIgnorance   17.51188    1.05058 7369.73658  16.669   <2e-16 ***
## condKnowledge   19.95222    1.04543 7372.04850  19.085   <2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn
## compYes     -0.124                                   
## age         -0.102  0.129                            
## gender2male -0.032 -0.010  0.006                     
## education   -0.319 -0.042 -0.095 -0.010              
## condIgnornc -0.069  0.002  0.001  0.002  0.003       
## condKnowldg -0.069  0.003  0.000  0.000  0.000  0.504

Vignette

  • Logistic versus linear: both the logistic and linear models show the same pattern of condition-by-vignette interaction results.
  • Exclusions: models fit with the updated and pre-registered exclusions show the same pattern of condition-by-vignette interaction results.
View full results
# log updated exclusions
# Logistic mixed model of dichotomized luck with the condition-by-vignette
# interaction: luck_vas_combined ~ (1 | vignette/id/person_code) + comp + age +
# gender2 + education + cond * vignette, updated exclusions (note singular fit).
summary(log_exclude[[23]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: luck_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond * vignette
##    Data: final_luck
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  15458.4  15577.5  -7713.2  15426.4    12624 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -2.1894 -1.0118 -0.2784  0.8435  3.7019 
## 
## Random effects:
##  Groups                    Name        Variance  Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.0004629 0.02151 
##  id:vignette               (Intercept) 0.0015660 0.03957 
##  vignette                  (Intercept) 0.0000000 0.00000 
## Number of obs: 12640, groups:  
## person_code:(id:vignette), 12640; id:vignette, 12640; vignette, 3
## 
## Fixed effects:
##                               Estimate Std. Error z value           Pr(>|z|)
## (Intercept)                   0.907304   0.128577   7.056 0.0000000000017077
## compYes                      -0.195308   0.044696  -4.370 0.0000124396016865
## age                           0.004152   0.002002   2.074             0.0381
## gender2male                  -0.004211   0.042725  -0.099             0.9215
## education                    -0.031082   0.007607  -4.086 0.0000439445273061
## condIgnorance                 0.083929   0.078418   1.070             0.2845
## condKnowledge                 0.723553   0.082462   8.774            < 2e-16
## vignetteEmma                 -2.851015   0.110619 -25.773            < 2e-16
## vignetteGerald               -1.070444   0.078819 -13.581            < 2e-16
## condIgnorance:vignetteEmma    2.527798   0.135060  18.716            < 2e-16
## condKnowledge:vignetteEmma    1.067342   0.138234   7.721 0.0000000000000115
## condIgnorance:vignetteGerald  0.723264   0.110503   6.545 0.0000000000594091
## condKnowledge:vignetteGerald  0.094904   0.113794   0.834             0.4043
##                                 
## (Intercept)                  ***
## compYes                      ***
## age                          *  
## gender2male                     
## education                    ***
## condIgnorance                   
## condKnowledge                ***
## vignetteEmma                 ***
## vignetteGerald               ***
## condIgnorance:vignetteEmma   ***
## condKnowledge:vignetteEmma   ***
## condIgnorance:vignetteGerald ***
## condKnowledge:vignetteGerald    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# log pre-registered exclusions
# Same logistic luck model with cond * vignette, fit on the pre-registered
# exclusion sample (also a singular fit per the output below).
summary(log_no[[23]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: luck_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond * vignette
##    Data: final_luck
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##   9914.3  10026.2  -4941.2   9882.3     8038 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -2.3215 -0.9978 -0.3065  0.8821  3.3888 
## 
## Random effects:
##  Groups                    Name        Variance      Std.Dev. 
##  person_code:(id:vignette) (Intercept) 0.00000006354 0.0002521
##  id:vignette               (Intercept) 0.00062112104 0.0249223
##  vignette                  (Intercept) 0.00000000000 0.0000000
## Number of obs: 8054, groups:  
## person_code:(id:vignette), 8054; id:vignette, 8054; vignette, 3
## 
## Fixed effects:
##                               Estimate Std. Error z value     Pr(>|z|)    
## (Intercept)                   0.726340   0.157160   4.622 0.0000038069 ***
## compYes                      -0.310590   0.054287  -5.721 0.0000000106 ***
## age                           0.002672   0.001875   1.425      0.15405    
## gender2male                   0.071172   0.051920   1.371      0.17044    
## education                    -0.027225   0.009964  -2.732      0.00629 ** 
## condIgnorance                 0.216393   0.097719   2.214      0.02680 *  
## condKnowledge                 1.041799   0.103708  10.045      < 2e-16 ***
## vignetteEmma                 -2.420701   0.129448 -18.700      < 2e-16 ***
## vignetteGerald               -0.881518   0.098234  -8.974      < 2e-16 ***
## condIgnorance:vignetteEmma    2.144465   0.161993  13.238      < 2e-16 ***
## condKnowledge:vignetteEmma    0.506074   0.166742   3.035      0.00240 ** 
## condIgnorance:vignetteGerald  0.592601   0.138050   4.293 0.0000176539 ***
## condKnowledge:vignetteGerald -0.135357   0.143460  -0.944      0.34542    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# linear updated exclusions
# Linear mixed model of continuous luck with cond * vignette, updated exclusions.
# Note: the output below reports a convergence failure (degenerate Hessian),
# so interpret the vignette main-effect terms with caution.
summary(linear_exclude[[20]])
## Linear mixed model fit by REML. t-tests use Satterthwaite's method [
## lmerModLmerTest]
## Formula: luck_vas ~ (1 | vignette/person_code) + comp + age + gender2 +  
##     education + cond * vignette
##    Data: final_luck
## 
## REML criterion at convergence: 115433.3
## 
## Scaled residuals: 
##      Min       1Q   Median       3Q      Max 
## -2.26629 -0.86904  0.00796  0.89563  2.77338 
## 
## Random effects:
##  Groups               Name        Variance Std.Dev.
##  person_code:vignette (Intercept)   53.59   7.321  
##  vignette             (Intercept)  467.01  21.610  
##  Residual                         1333.31  36.514  
## Number of obs: 11492, groups:  person_code:vignette, 108; vignette, 3
## 
## Fixed effects:
##                                       Estimate        Std. Error
## (Intercept)                     62.00486143107    21.79313778899
## compYes                         -2.82081270011     1.09714341861
## age                              0.11252721496     0.04104859008
## gender2male                     -0.81708065650     0.77399510385
## education                       -0.19005123193     0.14129038984
## condIgnorance                   -1.45497177057     1.45511632391
## condKnowledge                   12.69760114036     1.44027715415
## vignetteEmma                   -45.25132429500    30.64947387530
## vignetteGerald                 -20.59347789814    30.64900288394
## condIgnorance:vignetteEmma      41.98026886394     2.05257158723
## condKnowledge:vignetteEmma      13.96766969938     2.03635327821
## condIgnorance:vignetteGerald    15.92684810846     2.05659235562
## condKnowledge:vignetteGerald     3.62912065493     2.05084438817
##                                             df t value           Pr(>|t|)    
## (Intercept)                      0.00000009681   2.845            1.00000    
## compYes                       1795.38806869876  -2.571            0.01022 *  
## age                           7889.92575472046   2.741            0.00613 ** 
## gender2male                  11472.40208757258  -1.056            0.29114    
## education                    10514.10400168045  -1.345            0.17862    
## condIgnorance                11397.39945108335  -1.000            0.31738    
## condKnowledge                11394.90458979245   8.816            < 2e-16 ***
## vignetteEmma                     0.00000009468  -1.476            1.00000    
## vignetteGerald                   0.00000009467  -0.672            1.00000    
## condIgnorance:vignetteEmma   11396.83241856327  20.453            < 2e-16 ***
## condKnowledge:vignetteEmma   11399.25746574652   6.859 0.0000000000072852 ***
## condIgnorance:vignetteGerald 11397.55369705422   7.744 0.0000000000000104 ***
## condKnowledge:vignetteGerald 11396.01539164708   1.770            0.07682 .  
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (nloptwrap) convergence code: 0 (OK)
## unable to evaluate scaled gradient
## Model failed to converge: degenerate  Hessian with 1 negative eigenvalues
# linear pre-registered exclusions
# Same linear luck model with cond * vignette on the pre-registered exclusion
# sample; interaction pattern matches the updated-exclusion fit above.
summary(linear_no[[20]])
## Linear mixed model fit by REML. t-tests use Satterthwaite's method [
## lmerModLmerTest]
## Formula: luck_vas ~ (1 | vignette/person_code) + comp + age + gender2 +  
##     education + cond * vignette
##    Data: final_luck
## 
## REML criterion at convergence: 74307.3
## 
## Scaled residuals: 
##      Min       1Q   Median       3Q      Max 
## -2.29618 -0.85176 -0.02675  0.89929  2.74315 
## 
## Random effects:
##  Groups               Name        Variance Std.Dev.
##  person_code:vignette (Intercept)   50.03   7.073  
##  vignette             (Intercept)  442.34  21.032  
##  Residual                         1304.79  36.122  
## Number of obs: 7417, groups:  person_code:vignette, 54; vignette, 3
## 
## Fixed effects:
##                                    Estimate     Std. Error             df
## (Intercept)                    56.603331862   21.326736960    0.000001706
## compYes                        -3.207545567    1.306773114 1417.222634030
## age                             0.073279480    0.034777126 6377.409772372
## gender2male                    -0.415736991    0.941569560 7375.205914841
## education                      -0.019231662    0.184896502 6084.032672597
## condIgnorance                   0.407572836    1.793742781 7365.416751346
## condKnowledge                  17.784220887    1.759681574 7364.887326197
## vignetteEmma                  -39.286740994   29.900603963    0.000001648
## vignetteGerald                -18.338432675   29.899410644    0.000001648
## condIgnorance:vignetteEmma     37.407950518    2.533560916 7367.772004482
## condKnowledge:vignetteEmma      6.332728477    2.500206749 7370.563665836
## condIgnorance:vignetteGerald   13.612039683    2.532708855 7365.178581504
## condKnowledge:vignetteGerald    0.414528408    2.523125338 7365.238043876
##                              t value     Pr(>|t|)    
## (Intercept)                    2.654       1.0000    
## compYes                       -2.455       0.0142 *  
## age                            2.107       0.0351 *  
## gender2male                   -0.442       0.6588    
## education                     -0.104       0.9172    
## condIgnorance                  0.227       0.8203    
## condKnowledge                 10.106      < 2e-16 ***
## vignetteEmma                  -1.314       1.0000    
## vignetteGerald                -0.613       1.0000    
## condIgnorance:vignetteEmma    14.765      < 2e-16 ***
## condKnowledge:vignetteEmma     2.533       0.0113 *  
## condIgnorance:vignetteGerald   5.374 0.0000000791 ***
## condKnowledge:vignetteGerald   0.164       0.8695    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Turk

  • Logistic versus linear: no model shows a significant interaction of condition and Turk status.
  • Exclusions: the absence of a condition-by-Turk interaction holds under both the updated and pre-registered exclusions.
View full results
# log updated exclusions
# NOTE(review): this appears in the Luck > Turk section, but the captured output
# below is the *reason* model (reason_vas_combined ~ ... cond * turk on
# final_long), duplicating earlier reason-model output — verify that
# log_exclude[[24]] is the intended luck cond * turk model and re-render.
summary(log_exclude[[24]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: reason_vas_combined ~ (1 | vignette/id/person_code) + comp +  
##     age + gender2 + education + cond * turk
##    Data: final_long
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##   7017.4   7115.5  -3495.7   6991.4    13961 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -8.5047  0.2054  0.2546  0.3087  0.6997 
## 
## Random effects:
##  Groups                    Name        Variance     Std.Dev. 
##  person_code:(id:vignette) (Intercept) 0.0023770476 0.0487550
##  id:vignette               (Intercept) 0.0000001472 0.0003837
##  vignette                  (Intercept) 0.0768855931 0.2772825
## Number of obs: 13974, groups:  
## person_code:(id:vignette), 13974; id:vignette, 13974; vignette, 3
## 
## Fixed effects:
##                         Estimate Std. Error z value   Pr(>|z|)    
## (Intercept)             2.073866   0.251554   8.244    < 2e-16 ***
## compYes                 0.131776   0.075708   1.741   0.081757 .  
## age                    -0.012466   0.003376  -3.692   0.000222 ***
## gender2male            -0.232380   0.071089  -3.269   0.001080 ** 
## education               0.056843   0.011725   4.848 0.00000125 ***
## condIgnorance          -0.378343   0.077451  -4.885 0.00000103 ***
## condKnowledge           0.440103   0.092314   4.767 0.00000187 ***
## turkTRUE                1.385038   0.368873   3.755   0.000173 ***
## condIgnorance:turkTRUE -0.655384   0.427927  -1.532   0.125638    
## condKnowledge:turkTRUE -0.443704   0.514800  -0.862   0.388745    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn cndKnw trTRUE cI:TRU
## compYes     -0.251                                                        
## age         -0.338  0.315                                                 
## gender2male -0.109  0.053 -0.039                                          
## education   -0.583 -0.094 -0.088  0.035                                   
## condIgnornc -0.180 -0.003  0.005  0.004  0.001                            
## condKnowldg -0.154  0.002  0.001 -0.003  0.007  0.484                     
## turkTRUE     0.044 -0.113 -0.156 -0.050 -0.003  0.120  0.101              
## cndIgn:TRUE  0.032  0.001  0.001 -0.002 -0.001 -0.181 -0.088 -0.835       
## cndKnw:TRUE  0.027  0.000  0.001  0.001 -0.002 -0.087 -0.179 -0.694  0.598
# log pre-registered exclusions
# Logistic mixed model of dichotomized luck with the condition-by-Turk
# interaction: luck_vas_combined ~ (1 | vignette/id/person_code) + comp + age +
# gender2 + education + cond * turk, pre-registered exclusions.
summary(log_no[[24]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: luck_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond * turk
##    Data: final_luck
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  10140.0  10230.9  -5057.0  10114.0     8041 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -2.2125 -0.8309 -0.4430  0.9046  2.4681 
## 
## Random effects:
##  Groups                    Name        Variance Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.007294 0.08541 
##  id:vignette               (Intercept) 0.005094 0.07137 
##  vignette                  (Intercept) 0.335650 0.57935 
## Number of obs: 8054, groups:  
## person_code:(id:vignette), 8054; id:vignette, 8054; vignette, 3
## 
## Fixed effects:
##                         Estimate Std. Error z value Pr(>|z|)    
## (Intercept)            -0.384689   0.366283  -1.050  0.29360    
## compYes                -0.212691   0.056350  -3.774  0.00016 ***
## age                     0.006544   0.002087   3.136  0.00172 ** 
## gender2male             0.104048   0.052069   1.998  0.04569 *  
## education              -0.028709   0.009843  -2.917  0.00354 ** 
## condIgnorance           1.070263   0.062995  16.990  < 2e-16 ***
## condKnowledge           1.022183   0.062783  16.281  < 2e-16 ***
## turkTRUE               -0.267159   0.143859  -1.857  0.06330 .  
## condIgnorance:turkTRUE -0.312009   0.190996  -1.634  0.10235    
## condKnowledge:turkTRUE  0.081674   0.186521   0.438  0.66147    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn cndKnw trTRUE cI:TRU
## compYes     -0.082                                                        
## age         -0.133  0.274                                                 
## gender2male -0.045  0.038  0.004                                          
## education   -0.335 -0.157 -0.096 -0.005                                   
## condIgnornc -0.087 -0.016  0.018  0.014 -0.012                            
## condKnowldg -0.085 -0.018  0.016  0.007 -0.017  0.528                     
## turkTRUE     0.019 -0.194 -0.249 -0.096  0.002  0.223  0.225              
## cndIgn:TRUE  0.028  0.006 -0.001  0.004  0.002 -0.326 -0.171 -0.687       
## cndKnw:TRUE  0.029  0.008  0.003 -0.001  0.001 -0.175 -0.334 -0.705  0.529
# linear updated exclusions
summary(linear_exclude[[21]])
## Linear mixed model fit by REML. t-tests use Satterthwaite's method [
## lmerModLmerTest]
## Formula: luck_vas ~ (1 | vignette/person_code) + comp + age + gender2 +  
##     education + cond * turk
##    Data: final_luck
## 
## REML criterion at convergence: 115879.9
## 
## Scaled residuals: 
##      Min       1Q   Median       3Q      Max 
## -2.20826 -0.82690 -0.05232  0.88579  2.41214 
## 
## Random effects:
##  Groups               Name        Variance Std.Dev.
##  person_code:vignette (Intercept)   51.58   7.182  
##  vignette             (Intercept)  174.34  13.204  
##  Residual                         1384.81  37.213  
## Number of obs: 11492, groups:  person_code:vignette, 108; vignette, 3
## 
## Fixed effects:
##                           Estimate  Std. Error          df t value Pr(>|t|)    
## (Intercept)               40.46249     8.03063     2.40926   5.039   0.0250 *  
## compYes                   -2.62626     1.11565  1768.43733  -2.354   0.0187 *  
## age                        0.12229     0.04224  9701.27652   2.895   0.0038 ** 
## gender2male               -0.50662     0.78911 11479.55357  -0.642   0.5209    
## education                 -0.24391     0.14382 10370.91135  -1.696   0.0899 .  
## condIgnorance             18.22654     0.89338 11400.06200  20.402   <2e-16 ***
## condKnowledge             18.32874     0.89151 11402.93140  20.559   <2e-16 ***
## turkTRUE                  -3.87830     4.77559    92.90025  -0.812   0.4188    
## condIgnorance:turkTRUE    -3.60810     3.04945 11382.24992  -1.183   0.2368    
## condKnowledge:turkTRUE     2.84703     2.99023 11378.59470   0.952   0.3411    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn cndKnw trTRUE cI:TRU
## compYes     -0.113                                                        
## age         -0.114  0.170                                                 
## gender2male -0.026 -0.002 -0.013                                          
## education   -0.233 -0.031 -0.111  0.002                                   
## condIgnornc -0.057  0.002  0.007  0.000  0.000                            
## condKnowldg -0.056  0.004  0.001 -0.001  0.000  0.504                     
## turkTRUE     0.001 -0.087 -0.141 -0.042  0.010  0.093  0.094              
## cndIgn:TRUE  0.017 -0.002 -0.010  0.003  0.002 -0.293 -0.148 -0.307       
## cndKnw:TRUE  0.017 -0.001  0.001  0.000 -0.002 -0.150 -0.298 -0.314  0.492
# linear pre-registered exclusions
summary(linear_no[[21]])
## Linear mixed model fit by REML. t-tests use Satterthwaite's method [
## lmerModLmerTest]
## Formula: luck_vas ~ (1 | vignette/person_code) + comp + age + gender2 +  
##     education + cond * turk
##    Data: final_luck
## 
## REML criterion at convergence: 74574.9
## 
## Scaled residuals: 
##      Min       1Q   Median       3Q      Max 
## -2.13620 -0.82667 -0.06379  0.89004  2.44419 
## 
## Random effects:
##  Groups               Name        Variance Std.Dev.
##  person_code:vignette (Intercept)   49.4    7.028  
##  vignette             (Intercept)  151.0   12.288  
##  Residual                         1349.7   36.738  
## Number of obs: 7417, groups:  person_code:vignette, 54; vignette, 3
## 
## Fixed effects:
##                          Estimate Std. Error         df t value Pr(>|t|)    
## (Intercept)              37.99730    7.75492    2.72402   4.900   0.0203 *  
## compYes                  -2.80876    1.33188 1483.70966  -2.109   0.0351 *  
## age                       0.08733    0.03560 7110.29717   2.453   0.0142 *  
## gender2male              -0.04987    0.95833 7402.27863  -0.052   0.9585    
## education                -0.10224    0.18781 5969.67367  -0.544   0.5862    
## condIgnorance            17.93814    1.12847 7368.87484  15.896   <2e-16 ***
## condKnowledge            19.76367    1.12575 7372.00590  17.556   <2e-16 ***
## turkTRUE                 -2.13558    4.76143   49.74337  -0.449   0.6557    
## condIgnorance:turkTRUE   -3.29727    3.09170 7361.03046  -1.066   0.2862    
## condKnowledge:turkTRUE    1.40167    3.03440 7359.24962   0.462   0.6441    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn cndKnw trTRUE cI:TRU
## compYes     -0.122                                                        
## age         -0.100  0.140                                                 
## gender2male -0.032 -0.004  0.014                                          
## education   -0.320 -0.041 -0.094 -0.010                                   
## condIgnornc -0.074  0.002  0.004  0.001  0.002                            
## condKnowldg -0.074  0.003 -0.001  0.000  0.002  0.506                     
## turkTRUE    -0.020 -0.097 -0.113 -0.055  0.002  0.118  0.119              
## cndIgn:TRUE  0.027 -0.002 -0.008  0.003  0.001 -0.365 -0.185 -0.313       
## cndKnw:TRUE  0.028 -0.001  0.001 -0.001 -0.004 -0.188 -0.371 -0.320  0.493

Qualtrics versus SoSciSurvey

  • This section examines if there is an interaction between data collection source (Qualtrics versus SoSciSurvey) and the results with condition.
# Trial counts by survey platform; show NA as its own cell if present.
table(final_long[["source"]], useNA = "ifany")
## 
##   Qualtrics SoSciSurvey 
##        1668       12810

Knowledge

  • No interaction between data collection source and condition.
View full results
# Sensitivity check: does survey platform (Qualtrics vs. SoSciSurvey)
# interact with condition for the dichotomized knowledge rating?
# Random intercepts are nested: person_code within id within vignette.
k.model.qualtrics <- glmer(
  know_vas_combined ~ (1 | vignette / id / person_code) +
    comp + age + gender2 + education + cond * source,
  data = final_long,
  family = binomial,
  control = glmerControl(optimizer = "bobyqa"),
  nAGQ = 0  # faster, less exact likelihood approximation (see ?glmer)
)

summary(k.model.qualtrics)
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: know_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond * source
##    Data: final_long
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  15873.7  15971.7  -7923.9  15847.7    13882 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -1.8113 -0.6192 -0.4883  0.7791  4.0997 
## 
## Random effects:
##  Groups                    Name        Variance            Std.Dev.    
##  person_code:(id:vignette) (Intercept) 0.00000000000003787 0.0000001946
##  id:vignette               (Intercept) 0.00000000000000000 0.0000000000
##  vignette                  (Intercept) 0.44757953395657191 0.6690138518
## Number of obs: 13895, groups:  
## person_code:(id:vignette), 13895; id:vignette, 13895; vignette, 3
## 
## Fixed effects:
##                                  Estimate Std. Error z value     Pr(>|z|)    
## (Intercept)                     -0.110683   0.419438  -0.264       0.7919    
## compYes                          0.008579   0.046544   0.184       0.8538    
## age                              0.002997   0.002200   1.362       0.1731    
## gender2male                     -0.087356   0.042739  -2.044       0.0410 *  
## education                       -0.016789   0.007530  -2.230       0.0258 *  
## condIgnorance                   -1.478409   0.150831  -9.802      < 2e-16 ***
## condKnowledge                    0.714284   0.131647   5.426 0.0000000577 ***
## sourceSoSciSurvey               -0.056457   0.102949  -0.548       0.5834    
## condIgnorance:sourceSoSciSurvey  0.186034   0.159820   1.164       0.2444    
## condKnowledge:sourceSoSciSurvey -0.115661   0.139726  -0.828       0.4078    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn cndKnw srcSSS cI:SSS
## compYes     -0.130                                                        
## age         -0.182  0.292                                                 
## gender2male -0.044  0.034 -0.052                                          
## education   -0.216 -0.126 -0.123  0.032                                   
## condIgnornc -0.135  0.000 -0.002  0.006  0.003                            
## condKnowldg -0.154 -0.001  0.000 -0.005 -0.002  0.428                     
## sorcSScSrvy -0.262  0.189  0.274  0.047 -0.030  0.549  0.629              
## cndIgnr:SSS  0.127  0.001  0.001 -0.002  0.000 -0.943 -0.405 -0.585       
## cndKnwl:SSS  0.145  0.001  0.000  0.003  0.000 -0.404 -0.941 -0.669  0.431
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')

Reasonable

  • No interaction of source of data collection and condition.
View full results
# Same platform-by-condition sensitivity check for the dichotomized
# reasonableness rating.
r.model.qualtrics <- glmer(
  reason_vas_combined ~ (1 | vignette / id / person_code) +
    comp + age + gender2 + education + cond * source,
  data = final_long,
  family = binomial,
  control = glmerControl(optimizer = "bobyqa"),
  nAGQ = 0  # faster, less exact likelihood approximation (see ?glmer)
)

summary(r.model.qualtrics)
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: reason_vas_combined ~ (1 | vignette/id/person_code) + comp +  
##     age + gender2 + education + cond * source
##    Data: final_long
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##   7009.4   7107.5  -3491.7   6983.4    13961 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -9.6066  0.2058  0.2557  0.3091  0.7091 
## 
## Random effects:
##  Groups                    Name        Variance       Std.Dev.  
##  person_code:(id:vignette) (Intercept) 0.001105542067 0.03324969
##  id:vignette               (Intercept) 0.000000001287 0.00003588
##  vignette                  (Intercept) 0.076735814599 0.27701230
## Number of obs: 13974, groups:  
## person_code:(id:vignette), 13974; id:vignette, 13974; vignette, 3
## 
## Fixed effects:
##                                  Estimate Std. Error z value   Pr(>|z|)    
## (Intercept)                      3.107859   0.368450   8.435    < 2e-16 ***
## compYes                          0.108354   0.076479   1.417    0.15655    
## age                             -0.013268   0.003389  -3.915 0.00009024 ***
## gender2male                     -0.209617   0.070832  -2.959    0.00308 ** 
## education                        0.057681   0.011639   4.956 0.00000072 ***
## condIgnorance                   -0.801616   0.300334  -2.669    0.00761 ** 
## condKnowledge                    0.546456   0.403795   1.353    0.17596    
## sourceSoSciSurvey               -1.035668   0.259899  -3.985 0.00006751 ***
## condIgnorance:sourceSoSciSurvey  0.428385   0.310483   1.380    0.16767    
## condKnowledge:sourceSoSciSurvey -0.126980   0.414413  -0.306    0.75929    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn cndKnw srcSSS cI:SSS
## compYes     -0.277                                                        
## age         -0.365  0.333                                                 
## gender2male -0.094  0.041 -0.052                                          
## education   -0.395 -0.093 -0.086  0.037                                   
## condIgnornc -0.553  0.002  0.004  0.001  0.000                            
## condKnowldg -0.411  0.002  0.003  0.002  0.000  0.503                     
## sorcSScSrvy -0.732  0.145  0.188  0.034 -0.001  0.782  0.582              
## cndIgnr:SSS  0.534 -0.003 -0.003 -0.001  0.000 -0.967 -0.486 -0.800       
## cndKnwl:SSS  0.400 -0.002 -0.003 -0.003  0.002 -0.490 -0.974 -0.600  0.502
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')

Luck

  • No interaction between the survey platform and condition with luck as the dependent variable.
View full results
# Platform-by-condition sensitivity check for the dichotomized luck rating.
# Note this model uses the luck-specific data set (final_luck).
l.model.qualtrics <- glmer(
  luck_vas_combined ~ (1 | vignette / id / person_code) +
    comp + age + gender2 + education + cond * source,
  data = final_luck,
  family = binomial,
  control = glmerControl(optimizer = "bobyqa"),
  nAGQ = 0  # faster, less exact likelihood approximation (see ?glmer)
)

summary(l.model.qualtrics)
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: luck_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond * source
##    Data: final_luck
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  15875.8  15972.6  -7924.9  15849.8    12627 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -2.2714 -0.8127 -0.4484  0.9056  2.4708 
## 
## Random effects:
##  Groups                    Name        Variance Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.006534 0.08083 
##  id:vignette               (Intercept) 0.007099 0.08426 
##  vignette                  (Intercept) 0.373772 0.61137 
## Number of obs: 12640, groups:  
## person_code:(id:vignette), 12640; id:vignette, 12640; vignette, 3
## 
## Fixed effects:
##                                  Estimate Std. Error z value          Pr(>|z|)
## (Intercept)                     -0.693905   0.391717  -1.771           0.07649
## compYes                         -0.116423   0.045887  -2.537           0.01118
## age                              0.008634   0.002178   3.965 0.000073309386957
## gender2male                      0.019755   0.042317   0.467           0.64062
## education                       -0.033060   0.007507  -4.404 0.000010622832056
## condIgnorance                    0.945502   0.144940   6.523 0.000000000068728
## condKnowledge                    1.045767   0.141417   7.395 0.000000000000141
## sourceSoSciSurvey                0.351019   0.114721   3.060           0.00222
## condIgnorance:sourceSoSciSurvey  0.091837   0.153188   0.600           0.54884
## condKnowledge:sourceSoSciSurvey -0.122297   0.149856  -0.816           0.41445
##                                    
## (Intercept)                     .  
## compYes                         *  
## age                             ***
## gender2male                        
## education                       ***
## condIgnorance                   ***
## condKnowledge                   ***
## sourceSoSciSurvey               ** 
## condIgnorance:sourceSoSciSurvey    
## condKnowledge:sourceSoSciSurvey    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn cndKnw srcSSS cI:SSS
## compYes     -0.137                                                        
## age         -0.196  0.284                                                 
## gender2male -0.042  0.031 -0.052                                          
## education   -0.229 -0.127 -0.122  0.019                                   
## condIgnornc -0.192  0.001  0.007  0.004 -0.003                            
## condKnowldg -0.197  0.002  0.011 -0.002 -0.006  0.530                     
## sorcSScSrvy -0.305  0.173  0.255  0.037 -0.032  0.654  0.671              
## cndIgnr:SSS  0.182 -0.002 -0.001 -0.003 -0.002 -0.945 -0.501 -0.691       
## cndKnwl:SSS  0.186 -0.005 -0.006  0.002  0.001 -0.499 -0.943 -0.707  0.528

Table of Exclusions by Vignette and Condition

  • Exclusions if one considered each comprehension check separately.
# Comprehension-check key: response "1" is correct in the Gettier and
# Knowledge conditions; response "2" is correct in the Ignorance condition.
# Any other/missing response is left as NA.
full_long$correct <- NA
in_ignorance <- full_long$cond == "Ignorance"
full_long$correct[in_ignorance & full_long$compr == "2"] <- TRUE
full_long$correct[in_ignorance & full_long$compr == "1"] <- FALSE
full_long$correct[!in_ignorance & full_long$compr == "1"] <- TRUE
full_long$correct[!in_ignorance & full_long$compr == "2"] <- FALSE

# Tabulate correct vs. incorrect answers for each vignette and condition.
flextable(full_long %>%
            select(vignette, cond, correct) %>%
            drop_na() %>%
            group_by(vignette, cond, correct) %>%
            count())

vignette

cond

correct

n

Darrel

Gettier

FALSE

344

Darrel

Gettier

TRUE

2,176

Darrel

Ignorance

FALSE

520

Darrel

Ignorance

TRUE

2,301

Darrel

Knowledge

FALSE

274

Darrel

Knowledge

TRUE

2,364

Emma

Gettier

FALSE

458

Emma

Gettier

TRUE

2,201

Emma

Ignorance

FALSE

375

Emma

Ignorance

TRUE

2,324

Emma

Knowledge

FALSE

359

Emma

Knowledge

TRUE

2,285

Gerald

Gettier

FALSE

676

Gerald

Gettier

TRUE

2,130

Gerald

Ignorance

FALSE

301

Gerald

Ignorance

TRUE

2,184

Gerald

Knowledge

FALSE

466

Gerald

Knowledge

TRUE

2,243

Only Exclude For Condition Incorrect

  • The first results presented in each section are analyses that exclude only the vignette trials answered incorrectly (i.e., retaining correct answers), and the second summary is the original information presented in the manuscript.
# Data prep: keep only trials with a correct comprehension answer that also
# pass every preregistered screening flag (age, prior exposure, purpose
# awareness, language). NA flags are dropped by filter(), as before.
exclude_DF <- full_long %>%
  filter(correct,
         !age_exclusion,
         !previous_exclusion,
         !purpose_exclusion,
         !lang_exclusion)

# Trials remaining after the comprehension + screening exclusions.
nrow(exclude_DF)
## [1] 18455
# Unique participants retained.
length(unique(exclude_DF$id))
## [1] 7049
# Unique person_code values (appears to index data-collection teams —
# TODO confirm against the codebook).
length(unique(exclude_DF$person_code))
## [1] 37
# Full data: vignette-by-condition cell sizes before any exclusions.
table(full_long$vignette, full_long$cond, useNA = "ifany")
##         
##          Gettier Ignorance Knowledge
##   Darrel    2821      3153      2972
##   Emma      2982      3034      2930
##   Gerald    3143      2759      3044
##   <NA>       494       494       494
# Cell sizes after excluding trials with incorrect comprehension answers.
table(exclude_DF$vignette, exclude_DF$cond, useNA = "ifany")
##         
##          Gettier Ignorance Knowledge
##   Darrel    1986      2119      2174
##   Emma      2009      2104      2085
##   Gerald    1942      2001      2035
# Dichotomize the three 0-100 VAS ratings with shared helpers instead of
# three hand-copied blocks (identical cutoffs; removes transcription risk).
#
# Binning scheme (identical to the original sequential recode):
#   vas <= 40      -> 2
#   40 < vas < 60  -> NA (ambiguous midpoint)
#   vas >= 60      -> 1
# NA inputs stay NA (logical-NA subscripts assign nothing, as before).
bin_vas <- function(vas) {
  binned <- vas
  binned[binned <= 40] <- 2
  binned[binned > 40 & binned < 60] <- NA
  binned[binned >= 60] <- 1
  binned
}

# Fill midpoint/missing VAS codes from the forced-choice binary item, then
# apply 3 - x, which swaps codes 1 <-> 2 exactly as the original two-step
# recode did. If both inputs are NA the result stays NA.
combine_with_bin <- function(binned, bin) {
  3 - ifelse(is.na(binned), bin, binned)
}

# knowledge recode
exclude_DF$know_vas_binned <- bin_vas(exclude_DF$know_vas)
exclude_DF$know_vas_combined <- combine_with_bin(exclude_DF$know_vas_binned,
                                                 exclude_DF$know_bin)

# reason recode
exclude_DF$reason_vas_binned <- bin_vas(exclude_DF$reason_vas)
exclude_DF$reason_vas_combined <- combine_with_bin(exclude_DF$reason_vas_binned,
                                                   exclude_DF$reason_bin)

# luck recode
exclude_DF$luck_vas_binned <- bin_vas(exclude_DF$luck_vas)
exclude_DF$luck_vas_combined <- combine_with_bin(exclude_DF$luck_vas_binned,
                                                 exclude_DF$luck_bin)

# For the luck analyses, a trial counts as correct when the right/wrong
# judgment matches the condition key: "Wrong" is the correct answer under
# Ignorance, "Right" under the other conditions.
exclude_DF$ri_wr <- factor(exclude_DF$ri_wr,
                           levels = c(1, 2),
                           labels = c("Right", "Wrong"))

answer_matches <- with(exclude_DF,
                       (cond == "Ignorance" & ri_wr == "Wrong") |
                         (cond != "Ignorance" & ri_wr == "Right"))
# Missing cond or ri_wr yields NA above; treat those as not correct, which
# matches the original default of FALSE.
exclude_DF$luck_correct <- !is.na(answer_matches) & answer_matches

# How many luck trials pass (TRUE) vs. fail (FALSE) the right/wrong screen.
table(exclude_DF$luck_correct)
## 
## FALSE  TRUE 
##  1473 16982
# Recode gender as a two-level factor; any other response becomes NA.
exclude_DF$gender2 <- factor(exclude_DF$gender, levels = c("female", "male"))

# Keep only the trials flagged as answering the luck check correctly.
exclude_luck <- exclude_DF[exclude_DF$luck_correct, ]

Knowledge

Condition

  • Results from coefficients are within a confidence interval of each other.
View full results
# Shift the 1/2 coding down to 0/1 so glmer models it as Bernoulli.
exclude_DF$know_vas_combined <- exclude_DF$know_vas_combined - 1

# Knowledge ~ condition under the per-vignette comprehension exclusions.
k.cond.exclude <- glmer(
  know_vas_combined ~ (1 | vignette / id / person_code) +
    comp + age + gender2 + education + cond,
  data = exclude_DF,
  family = binomial,
  control = glmerControl(optimizer = "bobyqa"),
  nAGQ = 0  # faster, less exact likelihood approximation (see ?glmer)
)

summary(k.cond.exclude)
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: know_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond
##    Data: exclude_DF
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  20472.8  20550.6 -10226.4  20452.8    17617 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -1.7573 -0.6379 -0.3242  0.7773  3.5460 
## 
## Random effects:
##  Groups                    Name        Variance Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.0000   0.0000  
##  id:vignette               (Intercept) 0.0000   0.0000  
##  vignette                  (Intercept) 0.3965   0.6297  
## Number of obs: 17627, groups:  
## person_code:(id:vignette), 17627; id:vignette, 17627; vignette, 3
## 
## Fixed effects:
##                Estimate Std. Error z value Pr(>|z|)    
## (Intercept)   -0.193853   0.378270  -0.512   0.6083    
## compYes        0.023915   0.039203   0.610   0.5418    
## age            0.004261   0.001747   2.439   0.0147 *  
## gender2male   -0.057121   0.037166  -1.537   0.1243    
## education     -0.013869   0.006537  -2.122   0.0339 *  
## condIgnorance -1.261018   0.043885 -28.735   <2e-16 ***
## condKnowledge  0.585633   0.039344  14.885   <2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn
## compYes     -0.073                                   
## age         -0.104  0.210                            
## gender2male -0.028  0.013 -0.090                     
## education   -0.219 -0.113 -0.102  0.036              
## condIgnornc -0.049 -0.005  0.005  0.006  0.007       
## condKnowldg -0.052  0.004  0.005 -0.012 -0.009  0.450
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# Corresponding model from the original (manuscript) exclusion set, shown
# for side-by-side comparison.
log_exclude[[6]] %>% summary()
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: know_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond
##    Data: final_long
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  15872.0  15947.4  -7926.0  15852.0    13885 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -1.7728 -0.6220 -0.4902  0.7726  3.8696 
## 
## Random effects:
##  Groups                    Name        Variance Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.0000   0.0000  
##  id:vignette               (Intercept) 0.0000   0.0000  
##  vignette                  (Intercept) 0.4474   0.6689  
## Number of obs: 13895, groups:  
## person_code:(id:vignette), 13895; id:vignette, 13895; vignette, 3
## 
## Fixed effects:
##                Estimate Std. Error z value Pr(>|z|)    
## (Intercept)   -0.183810   0.404093  -0.455   0.6492    
## compYes        0.019364   0.044633   0.434   0.6644    
## age            0.003733   0.001997   1.870   0.0615 .  
## gender2male   -0.084539   0.042604  -1.984   0.0472 *  
## education     -0.017069   0.007528  -2.267   0.0234 *  
## condIgnorance -1.313222   0.050238 -26.140   <2e-16 ***
## condKnowledge  0.611665   0.044435  13.765   <2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn
## compYes     -0.075                                   
## age         -0.107  0.197                            
## gender2male -0.030  0.014 -0.090                     
## education   -0.235 -0.118 -0.115  0.035              
## condIgnornc -0.049  0.001 -0.007  0.011  0.008       
## condKnowldg -0.054  0.001  0.002 -0.006 -0.005  0.431
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')

Vignette

  • Results from coefficients are within a confidence interval of each other.
View full results
# Knowledge ~ condition-by-vignette interaction under the per-vignette
# comprehension exclusions.
k.vignette.exclude <- glmer(
  know_vas_combined ~ (1 | vignette / id / person_code) +
    comp + age + gender2 + education + cond * vignette,
  data = exclude_DF,
  family = binomial,
  control = glmerControl(optimizer = "bobyqa"),
  nAGQ = 0  # faster, less exact likelihood approximation (see ?glmer)
)

summary(k.vignette.exclude)
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: know_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond * vignette
##    Data: exclude_DF
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  20394.5  20518.9 -10181.3  20362.5    17611 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -1.7656 -0.5950 -0.3998  0.7729  2.8762 
## 
## Random effects:
##  Groups                    Name        Variance         Std.Dev.   
##  person_code:(id:vignette) (Intercept) 0.00000000002716 0.000005211
##  id:vignette               (Intercept) 0.00000001285080 0.000113361
##  vignette                  (Intercept) 0.00000000000000 0.000000000
## Number of obs: 17627, groups:  
## person_code:(id:vignette), 17627; id:vignette, 17627; vignette, 3
## 
## Fixed effects:
##                               Estimate Std. Error z value Pr(>|z|)    
## (Intercept)                   0.519592   0.111542   4.658 3.19e-06 ***
## compYes                       0.023380   0.039387   0.594 0.552775    
## age                           0.004292   0.001754   2.447 0.014414 *  
## gender2male                  -0.059702   0.037333  -1.599 0.109784    
## education                    -0.013795   0.006568  -2.100 0.035701 *  
## condIgnorance                -1.516116   0.069414 -21.842  < 2e-16 ***
## condKnowledge                 0.479315   0.067636   7.087 1.37e-12 ***
## vignetteEmma                 -1.820538   0.074033 -24.591  < 2e-16 ***
## vignetteGerald               -0.388143   0.066356  -5.849 4.93e-09 ***
## condIgnorance:vignetteEmma    0.912496   0.112782   8.091 5.93e-16 ***
## condKnowledge:vignetteEmma    0.340920   0.100124   3.405 0.000662 ***
## condIgnorance:vignetteGerald  0.132929   0.100792   1.319 0.187223    
## condKnowledge:vignetteGerald  0.045013   0.094871   0.474 0.635168    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# Corresponding model from the original (manuscript) exclusion set, shown
# for side-by-side comparison.
log_exclude[[7]] %>% summary()
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: know_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond * vignette
##    Data: final_long
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  15807.7  15928.3  -7887.8  15775.7    13879 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -1.7804 -0.5717 -0.4484  0.7819  3.1287 
## 
## Random effects:
##  Groups                    Name        Variance           Std.Dev.    
##  person_code:(id:vignette) (Intercept) 0.0000000000172891 0.0000041580
##  id:vignette               (Intercept) 0.0000000000007192 0.0000008481
##  vignette                  (Intercept) 0.0000000000000000 0.0000000000
## Number of obs: 13895, groups:  
## person_code:(id:vignette), 13895; id:vignette, 13895; vignette, 3
## 
## Fixed effects:
##                               Estimate Std. Error z value          Pr(>|z|)    
## (Intercept)                   0.555020   0.126409   4.391 0.000011300772713 ***
## compYes                       0.016780   0.044844   0.374          0.708268    
## age                           0.003778   0.002004   1.886          0.059360 .  
## gender2male                  -0.087674   0.042794  -2.049          0.040489 *  
## education                    -0.016600   0.007561  -2.196          0.028122 *  
## condIgnorance                -1.604292   0.080023 -20.048           < 2e-16 ***
## condKnowledge                 0.503172   0.076198   6.603 0.000000000040161 ***
## vignetteEmma                 -1.930434   0.084439 -22.862           < 2e-16 ***
## vignetteGerald               -0.390320   0.073064  -5.342 0.000000091833949 ***
## condIgnorance:vignetteEmma    0.977499   0.132791   7.361 0.000000000000182 ***
## condKnowledge:vignetteEmma    0.395026   0.114528   3.449          0.000562 ***
## condIgnorance:vignetteGerald  0.223192   0.113776   1.962          0.049800 *  
## condKnowledge:vignetteGerald  0.021780   0.106168   0.205          0.837455    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')

Turk

  • Results from coefficients are within a confidence interval of each other. The significance level changes for the Turk analysis; however, this is likely due to power, as there are more participants in this analysis.
  • The pattern of data suggests the same results, with more selection of "believes" responses in the ignorance condition.
View full results
# Knowledge ~ condition-by-Turk interaction under the per-vignette
# comprehension exclusions.
k.turk.exclude <- glmer(
  know_vas_combined ~ (1 | vignette / id / person_code) +
    comp + age + gender2 + education + cond * turk,
  data = exclude_DF,
  family = binomial,
  control = glmerControl(optimizer = "bobyqa"),
  nAGQ = 0  # faster, less exact likelihood approximation (see ?glmer)
)

summary(k.turk.exclude)
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: know_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond * turk
##    Data: exclude_DF
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  20428.9  20530.0 -10201.4  20402.9    17614 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -2.1976 -0.6499 -0.3169  0.7909  3.6839 
## 
## Random effects:
##  Groups                    Name        Variance           Std.Dev.   
##  person_code:(id:vignette) (Intercept) 0.0000000000001772 0.000000421
##  id:vignette               (Intercept) 0.0000000000000000 0.000000000
##  vignette                  (Intercept) 0.3980771543788082 0.630933558
## Number of obs: 17627, groups:  
## person_code:(id:vignette), 17627; id:vignette, 17627; vignette, 3
## 
## Fixed effects:
##                          Estimate Std. Error z value Pr(>|z|)    
## (Intercept)            -0.0734328  0.3796903  -0.193 0.846644    
## compYes                -0.0354153  0.0406158  -0.872 0.383231    
## age                     0.0002084  0.0019163   0.109 0.913387    
## gender2male            -0.0894413  0.0377330  -2.370 0.017770 *  
## education              -0.0137125  0.0065381  -2.097 0.035964 *  
## condIgnorance          -1.2265206  0.0454744 -26.972  < 2e-16 ***
## condKnowledge           0.5681281  0.0408046  13.923  < 2e-16 ***
## turkTRUE                0.4431109  0.1154278   3.839 0.000124 ***
## condIgnorance:turkTRUE -0.5137547  0.1738963  -2.954 0.003133 ** 
## condKnowledge:turkTRUE  0.2523092  0.1570769   1.606 0.108213    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn cndKnw trTRUE cI:TRU
## compYes     -0.086                                                        
## age         -0.119  0.292                                                 
## gender2male -0.037  0.053 -0.022                                          
## education   -0.218 -0.111 -0.097  0.035                                   
## condIgnornc -0.052  0.001  0.012  0.009  0.005                            
## condKnowldg -0.053  0.003  0.000 -0.010 -0.010  0.452                     
## turkTRUE     0.028 -0.173 -0.260 -0.097  0.001  0.158  0.182              
## cndIgn:TRUE  0.011  0.002  0.005  0.001  0.004 -0.259 -0.120 -0.607       
## cndKnw:TRUE  0.012  0.002  0.012 -0.004  0.002 -0.120 -0.258 -0.673  0.445
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# Matching cond * turk knowledge model from the saved pre-registered fits
# (log_exclude[[8]], fitted on final_long — see the printed formula below).
summary(log_exclude[[8]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: know_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond * turk
##    Data: final_long
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  15850.2  15948.2  -7912.1  15824.2    13882 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -2.1117 -0.6244 -0.4852  0.7822  3.9470 
## 
## Random effects:
##  Groups                    Name        Variance           Std.Dev.    
##  person_code:(id:vignette) (Intercept) 0.0000000000002108 0.0000004591
##  id:vignette               (Intercept) 0.0000000000024070 0.0000015515
##  vignette                  (Intercept) 0.4494139896495767 0.6703834646
## Number of obs: 13895, groups:  
## person_code:(id:vignette), 13895; id:vignette, 13895; vignette, 3
## 
## Fixed effects:
##                          Estimate Std. Error z value Pr(>|z|)    
## (Intercept)            -0.0743861  0.4058531  -0.183  0.85458    
## compYes                -0.0330383  0.0463320  -0.713  0.47580    
## age                    -0.0001138  0.0022169  -0.051  0.95906    
## gender2male            -0.1113136  0.0431663  -2.579  0.00992 ** 
## education              -0.0164575  0.0075305  -2.185  0.02886 *  
## condIgnorance          -1.2894721  0.0521660 -24.719  < 2e-16 ***
## condKnowledge           0.5914056  0.0461318  12.820  < 2e-16 ***
## turkTRUE                0.3226120  0.1260957   2.558  0.01051 *  
## condIgnorance:turkTRUE -0.3324922  0.1930846  -1.722  0.08507 .  
## condKnowledge:turkTRUE  0.2986885  0.1735316   1.721  0.08521 .  
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn cndKnw trTRUE cI:TRU
## compYes     -0.090                                                        
## age         -0.124  0.288                                                 
## gender2male -0.039  0.053 -0.021                                          
## education   -0.233 -0.119 -0.112  0.032                                   
## condIgnornc -0.052  0.006  0.001  0.011  0.007                            
## condKnowldg -0.055 -0.001 -0.002 -0.006 -0.005  0.433                     
## turkTRUE     0.031 -0.182 -0.282 -0.098  0.011  0.158  0.184              
## cndIgn:TRUE  0.013 -0.001  0.001  0.007  0.001 -0.268 -0.119 -0.588       
## cndKnw:TRUE  0.014  0.000  0.002 -0.001  0.002 -0.118 -0.263 -0.654  0.427
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# Pull the model frame from the fitted glmer object and relabel the 0/1
# outcome for plotting.
data_graph <- k.turk.exclude@frame
data_graph$know_vas_combined <- factor(data_graph$know_vas_combined,
                                          levels = c(0,1),
                                          labels = c("Believes", "Knows"))

#graph the three way binary 
# FIX: theme_classic() is a complete theme and replaces every previously
# set theme element; the original called theme(text = ...) first and then
# theme_classic(), which silently discarded the text-size setting.
# Applying theme_classic() first, then theme(), keeps both.
ggplot(data_graph) +
  geom_mosaic(aes(x = product(know_vas_combined, cond, turk), 
                  fill = know_vas_combined), color = "black", size = .5) + 
  scale_fill_brewer(palette = "Greys", name = "Knowledge Choice", 
                    direction = -1) + 
  scale_x_productlist(breaks = c(.5,.95),
   labels = c("College", "MTurk")) + 
  theme_classic() + 
  theme(text = element_text(size = 15)) + 
  xlab("Turk") + 
  ylab("Condition")
## Warning: `unite_()` was deprecated in tidyr 1.2.0.
## ℹ Please use `unite()` instead.
## ℹ The deprecated feature was likely used in the ggmosaic package.
##   Please report the issue at <]8;;https://github.com/haleyjeppson/ggmosaichttps://github.com/haleyjeppson/ggmosaic]8;;>.
## This warning is displayed once every 8 hours.
## Call `lifecycle::last_lifecycle_warnings()` to see where this warning was
## generated.

Reasonable

Condition

  • Results from coefficients are within a confidence interval of each other.
View full results
# Shift the reasonableness outcome down by one so it is 0/1 for the
# binomial family. CAUTION: this subtraction is not idempotent — re-running
# the chunk shifts the values again; confirm the column is 0/1 before
# refitting.
exclude_DF$reason_vas_combined <- exclude_DF$reason_vas_combined - 1

# Logistic mixed model for the reasonableness choice: main effect of
# condition with the standard covariates and nested random intercepts.
r.cond.exclude <- glmer(
  reason_vas_combined ~ (1 | vignette/id/person_code) +
    comp + age + gender2 + education +
    cond,
  family = binomial,
  data = exclude_DF,
  nAGQ = 0,
  control = glmerControl(optimizer = "bobyqa")
)

summary(r.cond.exclude)
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: reason_vas_combined ~ (1 | vignette/id/person_code) + comp +  
##     age + gender2 + education + cond
##    Data: exclude_DF
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##   9517.7   9595.6  -4748.9   9497.7    17719 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -6.0153  0.2255  0.2701  0.3203  0.6333 
## 
## Random effects:
##  Groups                    Name        Variance Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.005945 0.07711 
##  id:vignette               (Intercept) 0.000840 0.02898 
##  vignette                  (Intercept) 0.053351 0.23098 
## Number of obs: 17729, groups:  
## person_code:(id:vignette), 17729; id:vignette, 17729; vignette, 3
## 
## Fixed effects:
##                Estimate Std. Error z value     Pr(>|z|)    
## (Intercept)    1.732079   0.210919   8.212      < 2e-16 ***
## compYes        0.269602   0.061981   4.350 0.0000136278 ***
## age           -0.003187   0.002776  -1.148     0.251052    
## gender2male   -0.211450   0.059866  -3.532     0.000412 ***
## education      0.053084   0.010104   5.254 0.0000001489 ***
## condIgnorance -0.342278   0.065361  -5.237 0.0000001634 ***
## condKnowledge  0.410338   0.076235   5.383 0.0000000734 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn
## compYes     -0.210                                   
## age         -0.313  0.210                            
## gender2male -0.088  0.012 -0.093                     
## education   -0.603 -0.091 -0.085  0.032              
## condIgnornc -0.182 -0.009  0.016  0.001 -0.002       
## condKnowldg -0.154  0.006  0.002 -0.010 -0.001  0.496
# Matching condition-only reasonableness model from the saved
# pre-registered fits (log_exclude[[14]], fitted on final_long — see the
# printed formula below).
summary(log_exclude[[14]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: reason_vas_combined ~ (1 | vignette/id/person_code) + comp +  
##     age + gender2 + education + cond
##    Data: final_long
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##   7047.1   7122.6  -3513.6   7027.1    13964 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -6.6491  0.2093  0.2544  0.3073  0.5823 
## 
## Random effects:
##  Groups                    Name        Variance Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.001461 0.03822 
##  id:vignette               (Intercept) 0.007296 0.08542 
##  vignette                  (Intercept) 0.076656 0.27687 
## Number of obs: 13974, groups:  
## person_code:(id:vignette), 13974; id:vignette, 13974; vignette, 3
## 
## Fixed effects:
##                Estimate Std. Error z value          Pr(>|z|)    
## (Intercept)    1.839819   0.248928   7.391 0.000000000000146 ***
## compYes        0.251565   0.072504   3.470          0.000521 ***
## age           -0.004968   0.003209  -1.548          0.121581    
## gender2male   -0.181153   0.070632  -2.565          0.010326 *  
## education      0.057713   0.011926   4.839 0.000001303215506 ***
## condIgnorance -0.400271   0.075940  -5.271 0.000000135772713 ***
## condKnowledge  0.425613   0.090671   4.694 0.000002678579960 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn
## compYes     -0.202                                   
## age         -0.297  0.201                            
## gender2male -0.085  0.014 -0.095                     
## education   -0.599 -0.098 -0.097  0.030              
## condIgnornc -0.178 -0.005  0.002  0.002  0.001       
## condKnowldg -0.154  0.003  0.003 -0.002  0.007  0.487

Vignette

  • Results from coefficients are within a confidence interval of each other.
View full results
# Logistic mixed model for the reasonableness choice: condition crossed
# with vignette, standard covariates, nested random intercepts for
# vignette/id/person_code.
r.vignette.exclude <- glmer(
  reason_vas_combined ~ (1 | vignette/id/person_code) +
    comp + age + gender2 + education +
    cond * vignette,
  family = binomial,
  data = exclude_DF,
  nAGQ = 0,
  control = glmerControl(optimizer = "bobyqa")
)

summary(r.vignette.exclude)
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: reason_vas_combined ~ (1 | vignette/id/person_code) + comp +  
##     age + gender2 + education + cond * vignette
##    Data: exclude_DF
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##   9493.4   9617.9  -4730.7   9461.4    17713 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -6.3289  0.2153  0.2696  0.3289  0.6023 
## 
## Random effects:
##  Groups                    Name        Variance  Std.Dev.     
##  person_code:(id:vignette) (Intercept) 3.170e-03 0.05630464413
##  id:vignette               (Intercept) 3.097e-06 0.00175968972
##  vignette                  (Intercept) 5.124e-15 0.00000007158
## Number of obs: 17729, groups:  
## person_code:(id:vignette), 17729; id:vignette, 17729; vignette, 3
## 
## Fixed effects:
##                               Estimate Std. Error z value           Pr(>|z|)
## (Intercept)                   2.274835   0.189689  11.992            < 2e-16
## compYes                       0.268822   0.062029   4.334 0.0000146542217458
## age                          -0.003160   0.002781  -1.136           0.255933
## gender2male                  -0.212684   0.059901  -3.551           0.000384
## education                     0.053685   0.010113   5.309 0.0000001104631506
## condIgnorance                -0.770139   0.134068  -5.744 0.0000000092242423
## condKnowledge                 0.243419   0.161056   1.511           0.130688
## vignetteEmma                 -1.008890   0.131325  -7.682 0.0000000000000156
## vignetteGerald               -0.493570   0.141754  -3.482           0.000498
## condIgnorance:vignetteEmma    0.733331   0.167123   4.388 0.0000114417360044
## condKnowledge:vignetteEmma    0.416902   0.199190   2.093           0.036350
## condIgnorance:vignetteGerald  0.358636   0.177623   2.019           0.043478
## condKnowledge:vignetteGerald -0.046783   0.207371  -0.226           0.821512
##                                 
## (Intercept)                  ***
## compYes                      ***
## age                             
## gender2male                  ***
## education                    ***
## condIgnorance                ***
## condKnowledge                   
## vignetteEmma                 ***
## vignetteGerald               ***
## condIgnorance:vignetteEmma   ***
## condKnowledge:vignetteEmma   *  
## condIgnorance:vignetteGerald *  
## condKnowledge:vignetteGerald    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# Matching cond * vignette reasonableness model from the saved
# pre-registered fits (log_exclude[[15]], fitted on final_long — see the
# printed formula below).
summary(log_exclude[[15]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: reason_vas_combined ~ (1 | vignette/id/person_code) + comp +  
##     age + gender2 + education + cond * vignette
##    Data: final_long
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##   7025.8   7146.5  -3496.9   6993.8    13958 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -7.4722  0.2020  0.2562  0.3173  0.5561 
## 
## Random effects:
##  Groups                    Name        Variance Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.000000 0.00000 
##  id:vignette               (Intercept) 0.002124 0.04609 
##  vignette                  (Intercept) 0.000000 0.00000 
## Number of obs: 13974, groups:  
## person_code:(id:vignette), 13974; id:vignette, 13974; vignette, 3
## 
## Fixed effects:
##                               Estimate Std. Error z value        Pr(>|z|)    
## (Intercept)                   2.425110   0.222940  10.878         < 2e-16 ***
## compYes                       0.248903   0.072555   3.431        0.000602 ***
## age                          -0.005006   0.003218  -1.556        0.119764    
## gender2male                  -0.181886   0.070666  -2.574        0.010056 *  
## education                     0.058594   0.011936   4.909 0.0000009160223 ***
## condIgnorance                -0.863705   0.160582  -5.379 0.0000000750731 ***
## condKnowledge                 0.390875   0.204642   1.910        0.056127 .  
## vignetteEmma                 -1.099565   0.155615  -7.066 0.0000000000016 ***
## vignetteGerald               -0.522306   0.168276  -3.104        0.001910 ** 
## condIgnorance:vignetteEmma    0.741195   0.196886   3.765        0.000167 ***
## condKnowledge:vignetteEmma    0.256617   0.245839   1.044        0.296559    
## condIgnorance:vignetteGerald  0.424379   0.210062   2.020        0.043357 *  
## condKnowledge:vignetteGerald -0.237181   0.255828  -0.927        0.353869    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')

Turk

  • Results from coefficients are within a confidence interval of each other. The significance level changes for the Turk analysis; however, this is likely due to power, as there are more participants in this analysis.
  • This pattern of results indicates the same results as other samples — very few people pick "unreasonable" overall, with slightly more in the ignorance condition.
View full results
# Logistic mixed model for the reasonableness choice: condition crossed
# with sample (MTurk vs. college), standard covariates, nested random
# intercepts for vignette/id/person_code.
r.turk.exclude <- glmer(
  reason_vas_combined ~ (1 | vignette/id/person_code) +
    comp + age + gender2 + education +
    cond * turk,
  family = binomial,
  data = exclude_DF,
  nAGQ = 0,
  control = glmerControl(optimizer = "bobyqa")
)

summary(r.turk.exclude)
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: reason_vas_combined ~ (1 | vignette/id/person_code) + comp +  
##     age + gender2 + education + cond * turk
##    Data: exclude_DF
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##   9473.4   9574.6  -4723.7   9447.4    17716 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -8.9148  0.2232  0.2714  0.3219  0.6611 
## 
## Random effects:
##  Groups                    Name        Variance   Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.00275884 0.05252 
##  id:vignette               (Intercept) 0.00001282 0.00358 
##  vignette                  (Intercept) 0.05311902 0.23048 
## Number of obs: 17729, groups:  
## person_code:(id:vignette), 17729; id:vignette, 17729; vignette, 3
## 
## Fixed effects:
##                         Estimate Std. Error z value    Pr(>|z|)    
## (Intercept)             1.955658   0.212975   9.183     < 2e-16 ***
## compYes                 0.157029   0.064484   2.435      0.0149 *  
## age                    -0.010047   0.002887  -3.481      0.0005 ***
## gender2male            -0.264650   0.060312  -4.388 0.000011437 ***
## education               0.051578   0.009951   5.183 0.000000218 ***
## condIgnorance          -0.314020   0.066613  -4.714 0.000002428 ***
## condKnowledge           0.415941   0.077366   5.376 0.000000076 ***
## turkTRUE                1.500966   0.345523   4.344 0.000013988 ***
## condIgnorance:turkTRUE -0.916342   0.391519  -2.340      0.0193 *  
## condKnowledge:turkTRUE -0.310357   0.483036  -0.643      0.5205    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn cndKnw trTRUE cI:TRU
## compYes     -0.258                                                        
## age         -0.352  0.312                                                 
## gender2male -0.115  0.053 -0.035                                          
## education   -0.592 -0.084 -0.071  0.038                                   
## condIgnornc -0.183 -0.006  0.019  0.004 -0.003                            
## condKnowldg -0.155  0.006  0.001 -0.009  0.000  0.492                     
## turkTRUE     0.042 -0.101 -0.137 -0.048 -0.008  0.108  0.095              
## cndIgn:TRUE  0.029  0.002  0.001 -0.005  0.002 -0.170 -0.084 -0.860       
## cndKnw:TRUE  0.023  0.001  0.006 -0.001 -0.001 -0.079 -0.160 -0.698  0.615
# Matching cond * turk reasonableness model from the saved pre-registered
# fits (log_exclude[[16]], fitted on final_long — see the printed formula
# below).
summary(log_exclude[[16]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: reason_vas_combined ~ (1 | vignette/id/person_code) + comp +  
##     age + gender2 + education + cond * turk
##    Data: final_long
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##   7017.4   7115.5  -3495.7   6991.4    13961 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -8.5047  0.2054  0.2546  0.3087  0.6997 
## 
## Random effects:
##  Groups                    Name        Variance     Std.Dev. 
##  person_code:(id:vignette) (Intercept) 0.0023770476 0.0487550
##  id:vignette               (Intercept) 0.0000001472 0.0003837
##  vignette                  (Intercept) 0.0768855931 0.2772825
## Number of obs: 13974, groups:  
## person_code:(id:vignette), 13974; id:vignette, 13974; vignette, 3
## 
## Fixed effects:
##                         Estimate Std. Error z value   Pr(>|z|)    
## (Intercept)             2.073866   0.251554   8.244    < 2e-16 ***
## compYes                 0.131776   0.075708   1.741   0.081757 .  
## age                    -0.012466   0.003376  -3.692   0.000222 ***
## gender2male            -0.232380   0.071089  -3.269   0.001080 ** 
## education               0.056843   0.011725   4.848 0.00000125 ***
## condIgnorance          -0.378343   0.077451  -4.885 0.00000103 ***
## condKnowledge           0.440103   0.092314   4.767 0.00000187 ***
## turkTRUE                1.385038   0.368873   3.755   0.000173 ***
## condIgnorance:turkTRUE -0.655384   0.427927  -1.532   0.125638    
## condKnowledge:turkTRUE -0.443704   0.514800  -0.862   0.388745    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn cndKnw trTRUE cI:TRU
## compYes     -0.251                                                        
## age         -0.338  0.315                                                 
## gender2male -0.109  0.053 -0.039                                          
## education   -0.583 -0.094 -0.088  0.035                                   
## condIgnornc -0.180 -0.003  0.005  0.004  0.001                            
## condKnowldg -0.154  0.002  0.001 -0.003  0.007  0.484                     
## turkTRUE     0.044 -0.113 -0.156 -0.050 -0.003  0.120  0.101              
## cndIgn:TRUE  0.032  0.001  0.001 -0.002 -0.001 -0.181 -0.088 -0.835       
## cndKnw:TRUE  0.027  0.000  0.001  0.001 -0.002 -0.087 -0.179 -0.694  0.598
# Pull the model frame from the fitted glmer object and relabel the 0/1
# outcome for plotting.
data_graph <- r.turk.exclude@frame
data_graph$reason_vas_combined <- factor(data_graph$reason_vas_combined,
                                          levels = c(0,1),
                                          labels = c("Unreasonable", "Reasonable"))

#graph the three way binary 
# FIX: theme_classic() is a complete theme and replaces every previously
# set theme element; the original called theme(text = ...) first and then
# theme_classic(), which silently discarded the text-size setting.
# Applying theme_classic() first, then theme(), keeps both.
ggplot(data_graph) +
  geom_mosaic(aes(x = product(reason_vas_combined, cond, turk), 
                  fill = reason_vas_combined), color = "black", size = .5) + 
  scale_fill_brewer(palette = "Greys", name = "Reason Choice", 
                    direction = -1) + 
  scale_x_productlist(breaks = c(.5,.95),
   labels = c("College", "MTurk")) + 
  theme_classic() + 
  theme(text = element_text(size = 15)) + 
  xlab("Turk") + 
  ylab("Condition")

Luck

Condition

  • Results from coefficients are within a confidence interval of each other.
View full results
# Shift the luck outcome down by one so it is 0/1 for the binomial family.
# CAUTION: this subtraction is not idempotent — re-running the chunk
# shifts the values again; confirm the column is 0/1 before refitting.
exclude_luck$luck_vas_combined <- exclude_luck$luck_vas_combined - 1

# Logistic mixed model for the luck choice: main effect of condition with
# the standard covariates and nested random intercepts.
l.cond.exclude <- glmer(
  luck_vas_combined ~ (1 | vignette/id/person_code) +
    comp + age + gender2 + education +
    cond,
  family = binomial,
  data = exclude_luck,
  nAGQ = 0,
  control = glmerControl(optimizer = "bobyqa")
)

summary(l.cond.exclude)
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: luck_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond
##    Data: exclude_luck
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  20023.7  20100.4 -10001.8  20003.7    15862 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -2.1927 -0.8876  0.4476  0.8101  2.1873 
## 
## Random effects:
##  Groups                    Name        Variance  Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.0092245 0.09604 
##  id:vignette               (Intercept) 0.0009849 0.03138 
##  vignette                  (Intercept) 0.3531817 0.59429 
## Number of obs: 15872, groups:  
## person_code:(id:vignette), 15872; id:vignette, 15872; vignette, 3
## 
## Fixed effects:
##                Estimate Std. Error z value    Pr(>|z|)    
## (Intercept)    0.196230   0.358969   0.547     0.58462    
## compYes        0.155525   0.039163   3.971 0.000071505 ***
## age           -0.004576   0.001756  -2.606     0.00915 ** 
## gender2male    0.029631   0.037335   0.794     0.42740    
## education      0.032963   0.006592   5.001 0.000000572 ***
## condIgnorance -0.991065   0.042358 -23.397     < 2e-16 ***
## condKnowledge -0.945551   0.042021 -22.502     < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn
## compYes     -0.076                                   
## age         -0.111  0.203                            
## gender2male -0.027  0.011 -0.089                     
## education   -0.232 -0.110 -0.100  0.025              
## condIgnornc -0.061 -0.008  0.032 -0.002 -0.016       
## condKnowldg -0.057 -0.010  0.011 -0.010 -0.021  0.533
# Matching condition-only luck model from the saved pre-registered fits
# (log_exclude[[22]], fitted on final_luck — see the printed formula
# below).
summary(log_exclude[[22]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: luck_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond
##    Data: final_luck
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  15896.2  15970.6  -7938.1  15876.2    12630 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -2.1575 -0.8039 -0.4574  0.9076  2.2438 
## 
## Random effects:
##  Groups                    Name        Variance Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.009841 0.09920 
##  id:vignette               (Intercept) 0.004391 0.06626 
##  vignette                  (Intercept) 0.373469 0.61112 
## Number of obs: 12640, groups:  
## person_code:(id:vignette), 12640; id:vignette, 12640; vignette, 3
## 
## Fixed effects:
##                Estimate Std. Error z value  Pr(>|z|)    
## (Intercept)   -0.246316   0.372248  -0.662    0.5082    
## compYes       -0.180694   0.043976  -4.109 0.0000398 ***
## age            0.004153   0.001967   2.111    0.0348 *  
## gender2male    0.007267   0.042185   0.172    0.8632    
## education     -0.031096   0.007485  -4.155 0.0000326 ***
## condIgnorance  1.027734   0.047414  21.676   < 2e-16 ***
## condKnowledge  0.935679   0.047106  19.863   < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn
## compYes     -0.079                                   
## age         -0.116  0.191                            
## gender2male -0.029  0.014 -0.086                     
## education   -0.253 -0.116 -0.109  0.023              
## condIgnornc -0.064 -0.007  0.018  0.002 -0.014       
## condKnowldg -0.062 -0.012  0.009 -0.001 -0.017  0.525

Vignette

  • Results from coefficients are within a confidence interval of each other.
View full results
# Logistic mixed model for the luck choice: condition crossed with
# vignette, standard covariates, nested random intercepts for
# vignette/id/person_code.
l.vignette.exclude <- glmer(
  luck_vas_combined ~ (1 | vignette/id/person_code) +
    comp + age + gender2 + education +
    cond * vignette,
  family = binomial,
  data = exclude_luck,
  nAGQ = 0,
  control = glmerControl(optimizer = "bobyqa")
)

summary(l.vignette.exclude)
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: luck_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond * vignette
##    Data: exclude_luck
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  19522.3  19645.0  -9745.1  19490.3    15856 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -3.4178 -0.8420  0.2872  1.0139  2.3088 
## 
## Random effects:
##  Groups                    Name        Variance  Std.Dev. 
##  person_code:(id:vignette) (Intercept) 4.840e-10 2.200e-05
##  id:vignette               (Intercept) 7.903e-04 2.811e-02
##  vignette                  (Intercept) 8.701e-51 9.328e-26
## Number of obs: 15872, groups:  
## person_code:(id:vignette), 15872; id:vignette, 15872; vignette, 3
## 
## Fixed effects:
##                               Estimate Std. Error z value Pr(>|z|)    
## (Intercept)                  -0.938281   0.114892  -8.167 3.17e-16 ***
## compYes                       0.161667   0.039751   4.067 4.76e-05 ***
## age                          -0.004615   0.001790  -2.578  0.00993 ** 
## gender2male                   0.039365   0.037780   1.042  0.29743    
## education                     0.033920   0.006705   5.059 4.22e-07 ***
## condIgnorance                -0.055473   0.069668  -0.796  0.42589    
## condKnowledge                -0.726025   0.073752  -9.844  < 2e-16 ***
## vignetteEmma                  2.711605   0.095022  28.537  < 2e-16 ***
## vignetteGerald                1.040804   0.071725  14.511  < 2e-16 ***
## condIgnorance:vignetteEmma   -2.367220   0.116687 -20.287  < 2e-16 ***
## condKnowledge:vignetteEmma   -0.957005   0.119734  -7.993 1.32e-15 ***
## condIgnorance:vignetteGerald -0.775527   0.099593  -7.787 6.86e-15 ***
## condKnowledge:vignetteGerald -0.137310   0.102304  -1.342  0.17954    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# Matching cond * vignette luck model from the saved pre-registered fits
# (log_exclude[[23]], fitted on final_luck — see the printed formula
# below).
summary(log_exclude[[23]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: luck_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond * vignette
##    Data: final_luck
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  15458.4  15577.5  -7713.2  15426.4    12624 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -2.1894 -1.0118 -0.2784  0.8435  3.7019 
## 
## Random effects:
##  Groups                    Name        Variance  Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.0004629 0.02151 
##  id:vignette               (Intercept) 0.0015660 0.03957 
##  vignette                  (Intercept) 0.0000000 0.00000 
## Number of obs: 12640, groups:  
## person_code:(id:vignette), 12640; id:vignette, 12640; vignette, 3
## 
## Fixed effects:
##                               Estimate Std. Error z value           Pr(>|z|)
## (Intercept)                   0.907304   0.128577   7.056 0.0000000000017077
## compYes                      -0.195308   0.044696  -4.370 0.0000124396016865
## age                           0.004152   0.002002   2.074             0.0381
## gender2male                  -0.004211   0.042725  -0.099             0.9215
## education                    -0.031082   0.007607  -4.086 0.0000439445273061
## condIgnorance                 0.083929   0.078418   1.070             0.2845
## condKnowledge                 0.723553   0.082462   8.774            < 2e-16
## vignetteEmma                 -2.851015   0.110619 -25.773            < 2e-16
## vignetteGerald               -1.070444   0.078819 -13.581            < 2e-16
## condIgnorance:vignetteEmma    2.527798   0.135060  18.716            < 2e-16
## condKnowledge:vignetteEmma    1.067342   0.138234   7.721 0.0000000000000115
## condIgnorance:vignetteGerald  0.723264   0.110503   6.545 0.0000000000594091
## condKnowledge:vignetteGerald  0.094904   0.113794   0.834             0.4043
##                                 
## (Intercept)                  ***
## compYes                      ***
## age                          *  
## gender2male                     
## education                    ***
## condIgnorance                   
## condKnowledge                ***
## vignetteEmma                 ***
## vignetteGerald               ***
## condIgnorance:vignetteEmma   ***
## condKnowledge:vignetteEmma   ***
## condIgnorance:vignetteGerald ***
## condKnowledge:vignetteGerald    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')

Turk

  • Results from coefficients are within a confidence interval of each other.
View full results
# Luck model under the pre-registered exclusions: condition x MTurk-sample
# interaction plus comprehension, age, gender, and education covariates.
# Random intercepts are nested person_code within id within vignette;
# nAGQ = 0 requests the faster approximate fit (see the summary header).
l.turk.exclude <- glmer(
  luck_vas_combined ~ (1 | vignette/id/person_code) +
    comp + age + gender2 + education + cond * turk,
  family = binomial,
  data = exclude_luck,
  nAGQ = 0,
  control = glmerControl(optimizer = "bobyqa")
)

summary(l.turk.exclude)
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: luck_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond * turk
##    Data: exclude_luck
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  19990.1  20089.8  -9982.1  19964.1    15859 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -2.4762 -0.8825  0.3890  0.8178  2.2976 
## 
## Random effects:
##  Groups                    Name        Variance Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.006357 0.07973 
##  id:vignette               (Intercept) 0.003636 0.06030 
##  vignette                  (Intercept) 0.353443 0.59451 
## Number of obs: 15872, groups:  
## person_code:(id:vignette), 15872; id:vignette, 15872; vignette, 3
## 
## Fixed effects:
##                         Estimate Std. Error z value    Pr(>|z|)    
## (Intercept)             0.331809   0.359922   0.922    0.356586    
## compYes                 0.091648   0.040658   2.254    0.024188 *  
## age                    -0.009262   0.001936  -4.784 0.000001717 ***
## gender2male            -0.002261   0.037795  -0.060    0.952305    
## education               0.033580   0.006603   5.085 0.000000367 ***
## condIgnorance          -1.007100   0.043863 -22.960     < 2e-16 ***
## condKnowledge          -0.937813   0.043576 -21.521     < 2e-16 ***
## turkTRUE                0.430552   0.129627   3.321    0.000895 ***
## condIgnorance:turkTRUE  0.207977   0.171212   1.215    0.224468    
## condKnowledge:turkTRUE -0.144527   0.165501  -0.873    0.382516    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn cndKnw trTRUE cI:TRU
## compYes     -0.090                                                        
## age         -0.127  0.286                                                 
## gender2male -0.035  0.048 -0.021                                          
## education   -0.231 -0.111 -0.098  0.022                                   
## condIgnornc -0.064 -0.004  0.035  0.001 -0.017                            
## condKnowldg -0.060 -0.005  0.015 -0.005 -0.021  0.531                     
## turkTRUE     0.025 -0.160 -0.250 -0.082  0.006  0.167  0.174              
## cndIgn:TRUE  0.014  0.004  0.005  0.001  0.006 -0.253 -0.133 -0.700       
## cndKnw:TRUE  0.014  0.007  0.016 -0.004  0.002 -0.137 -0.261 -0.726  0.547
# Comparison model pulled from the saved log_exclude list.
# NOTE(review): l.turk.exclude above models luck_vas_combined, but the
# printed formula of log_exclude[[24]] models reason_vas_combined --
# confirm that index 24 is the intended luck x turk comparison model.
summary(log_exclude[[24]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: reason_vas_combined ~ (1 | vignette/id/person_code) + comp +  
##     age + gender2 + education + cond * turk
##    Data: final_long
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##   7017.4   7115.5  -3495.7   6991.4    13961 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -8.5047  0.2054  0.2546  0.3087  0.6997 
## 
## Random effects:
##  Groups                    Name        Variance     Std.Dev. 
##  person_code:(id:vignette) (Intercept) 0.0023770476 0.0487550
##  id:vignette               (Intercept) 0.0000001472 0.0003837
##  vignette                  (Intercept) 0.0768855931 0.2772825
## Number of obs: 13974, groups:  
## person_code:(id:vignette), 13974; id:vignette, 13974; vignette, 3
## 
## Fixed effects:
##                         Estimate Std. Error z value   Pr(>|z|)    
## (Intercept)             2.073866   0.251554   8.244    < 2e-16 ***
## compYes                 0.131776   0.075708   1.741   0.081757 .  
## age                    -0.012466   0.003376  -3.692   0.000222 ***
## gender2male            -0.232380   0.071089  -3.269   0.001080 ** 
## education               0.056843   0.011725   4.848 0.00000125 ***
## condIgnorance          -0.378343   0.077451  -4.885 0.00000103 ***
## condKnowledge           0.440103   0.092314   4.767 0.00000187 ***
## turkTRUE                1.385038   0.368873   3.755   0.000173 ***
## condIgnorance:turkTRUE -0.655384   0.427927  -1.532   0.125638    
## condKnowledge:turkTRUE -0.443704   0.514800  -0.862   0.388745    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn cndKnw trTRUE cI:TRU
## compYes     -0.251                                                        
## age         -0.338  0.315                                                 
## gender2male -0.109  0.053 -0.039                                          
## education   -0.583 -0.094 -0.088  0.035                                   
## condIgnornc -0.180 -0.003  0.005  0.004  0.001                            
## condKnowldg -0.154  0.002  0.001 -0.003  0.007  0.484                     
## turkTRUE     0.044 -0.113 -0.156 -0.050 -0.003  0.120  0.101              
## cndIgn:TRUE  0.032  0.001  0.001 -0.002 -0.001 -0.181 -0.088 -0.835       
## cndKnw:TRUE  0.027  0.000  0.001  0.001 -0.002 -0.087 -0.179 -0.694  0.598

Just Emma or Gerald

  • This section covers the “direct” replication analysis on just Emma and just Gerald, restricted to participants who correctly answered the question for that vignette (the scoring for correct is presented earlier in this document).
# data prep: keep only rows where the participant answered the
# comprehension question for that vignette correctly
exclude_DF <- full_long %>% 
  filter(correct == TRUE) 

# knowledge recode: bin the visual-analog-scale response into 2 (<= 40),
# NA (strictly between 40 and 60, treated as indeterminate), or 1 (>= 60).
# These in-place assignments are order dependent: values set to 2 by the
# first rule are no longer matched by the later rules.
exclude_DF$know_vas_binned <- exclude_DF$know_vas
exclude_DF$know_vas_binned[exclude_DF$know_vas_binned <= 40] <- 2
exclude_DF$know_vas_binned[exclude_DF$know_vas_binned > 40 & 
                             exclude_DF$know_vas_binned < 60] <- NA
exclude_DF$know_vas_binned[exclude_DF$know_vas_binned >= 60] <- 1
# where the VAS bin is indeterminate (NA), fall back to the binary item
exclude_DF$know_vas_combined <- ifelse(is.na(exclude_DF$know_vas_binned), 
                                     exclude_DF$know_bin, 
                                     exclude_DF$know_vas_binned)

# flip the 1/2 coding (1 <-> 2) so the categories line up for the tables below
exclude_DF$know_vas_combined <- 3 - exclude_DF$know_vas_combined

# fix other variables: two-level gender factor; other gender values become NA
exclude_DF$gender2 <- factor(exclude_DF$gender,
                             levels = c("female", "male"))

# Gerald: participants who saw Gerald first (orders GED/GDE), keeping
# only their Gerald-vignette rows.
gerald_DF <- exclude_DF %>%
  filter(vignette_order %in% c("GED", "GDE"),
         vignette == "Gerald")
nrow(gerald_DF)
## [1] 2203
# Emma: participants who saw Emma first (orders EDG/EGD), keeping
# only their Emma-vignette rows.
emma_DF <- exclude_DF %>%
  filter(vignette_order %in% c("EDG", "EGD"),
         vignette == "Emma")
nrow(emma_DF)
## [1] 2295

Gerald

  • The overall chi-square is significant, with an effect size similar to that of the Darrel-only analysis.
  • Gettier versus Ignorance shows the same effect size and pattern of effects.
  • Gettier versus Knowledge shows the same effect size and pattern of effects.
    View full results
# Overall condition x knowledge-response chi-square for Gerald only.
gerald_only <- chisq.test(gerald_DF$cond, gerald_DF$know_vas_combined)
# Cramer's V with 95% CI via MOTE. n is taken from the observed table,
# which drops rows with missing responses (hence n = 2133 < nrow(gerald_DF)).
# r = 2, c = 3 match the 2 (response) x 3 (condition) table, giving df = 2.
MOTE_gerald <- v.chi.sq(
  x2 = gerald_only$statistic,
  n = sum(gerald_only$observed),
  r = 2,
  c = 3,
  a = .05
)

gerald_only
## 
##  Pearson's Chi-squared test
## 
## data:  gerald_DF$cond and gerald_DF$know_vas_combined
## X-squared = 211.49, df = 2, p-value < 2.2e-16
# print the effect size (Cramer's V) with its confidence interval
MOTE_gerald
## $v
## X-squared 
## 0.3148861 
## 
## $vlow
## [1] 0.2733672
## 
## $vhigh
## [1] 0.3579365
## 
## $n
## [1] 2133
## 
## $df
## [1] 2
## 
## $x2
## X-squared 
##  211.4939 
## 
## $p
##    X-squared 
## 1.187641e-46 
## 
## $estimate
## [1] "$V$ = .31, 95\\% CI [.27, .36]"
## 
## $statistic
## [1] "$\\chi^2$(2) = 211.49, $p$ < .001"
# Gerald contingency table: response code (rows 1/2) by condition (columns).
k.g_table_rep <- table(gerald_DF$know_vas_combined, gerald_DF$cond)
k.g_table_rep
##    
##     Gettier Ignorance Knowledge
##   1     377       541       274
##   2     301       179       461
# Gettier vs. Ignorance: 2-sample test of proportions on the Gerald table.
# The rows are flipped (2:1) and the table transposed so each condition is a
# row of (code-2 count, code-1 count) for prop.test; tidy() flattens the result.
k.gcond_GI_rep <- tidy(prop.test(t(k.g_table_rep[2:1, 1:2])))
k.gcond_GI_rep
## # A tibble: 1 × 9
##   estimate1 estimate2 statistic  p.value parame…¹ conf.…² conf.…³ method alter…⁴
##       <dbl>     <dbl>     <dbl>    <dbl>    <dbl>   <dbl>   <dbl> <chr>  <chr>  
## 1     0.444     0.249      58.2 2.33e-14        1   0.145   0.246 2-sam… two.si…
## # … with abbreviated variable names ¹​parameter, ²​conf.low, ³​conf.high,
## #   ⁴​alternative
# Cramer's V (with CI) for the Gettier vs. Ignorance 2x2 comparison.
# Build the 2x2 table once instead of repeating the subset/transpose.
gi_2x2 <- t(k.g_table_rep[2:1, 1:2])
k.gcond_GI_v_rep <- v.chi.sq(x2 = prop.test(gi_2x2)$statistic,
                             n = sum(gi_2x2),
                             r = 2, c = 2)
k.gcond_GI_v_rep
## $v
## X-squared 
## 0.2040981 
## 
## $vlow
## [1] 0.1540183
## 
## $vhigh
## [1] 0.2579082
## 
## $n
## [1] 1398
## 
## $df
## [1] 1
## 
## $x2
## X-squared 
##  58.23511 
## 
## $p
##             X-squared 
## 0.0000000000000232591 
## 
## $estimate
## [1] "$V$ = .20, 95\\% CI [.15, .26]"
## 
## $statistic
## [1] "$\\chi^2$(1) = 58.24, $p$ < .001"
# Gettier vs. Knowledge: 2-sample test of proportions on the Gerald table,
# using columns 1 (Gettier) and 3 (Knowledge); tidy() flattens the result.
k.gcond_GK_rep <- tidy(prop.test(t(k.g_table_rep[2:1, c(1, 3)])))
k.gcond_GK_rep
## # A tibble: 1 × 9
##   estimate1 estimate2 statistic  p.value parame…¹ conf.…² conf.…³ method alter…⁴
##       <dbl>     <dbl>     <dbl>    <dbl>    <dbl>   <dbl>   <dbl> <chr>  <chr>  
## 1     0.444     0.627      46.9 7.34e-12        1  -0.236  -0.131 2-sam… two.si…
## # … with abbreviated variable names ¹​parameter, ²​conf.low, ³​conf.high,
## #   ⁴​alternative
# Cramer's V (with CI) for the Gettier vs. Knowledge 2x2 comparison.
gk_2x2 <- t(k.g_table_rep[2:1, c(1, 3)])
k.gcond_GK_v_rep <- v.chi.sq(x2 = prop.test(gk_2x2)$statistic,
                             n = sum(gk_2x2),
                             r = 2, c = 2)
k.gcond_GK_v_rep
## $v
## X-squared 
## 0.1822556 
## 
## $vlow
## [1] 0.1328066
## 
## $vhigh
## [1] 0.2359011
## 
## $n
## [1] 1413
## 
## $df
## [1] 1
## 
## $x2
## X-squared 
##  46.93575 
## 
## $p
##           X-squared 
## 0.00000000000733525 
## 
## $estimate
## [1] "$V$ = .18, 95\\% CI [.13, .24]"
## 
## $statistic
## [1] "$\\chi^2$(1) = 46.94, $p$ < .001"
# Condition-level proportions from the two Gerald pairwise tests
# (estimate1 = Gettier in both comparisons).
k.gcond_GI_rep$estimate1
## [1] 0.4439528
k.gcond_GI_rep$estimate2
## [1] 0.2486111
k.gcond_GK_rep$estimate1
## [1] 0.4439528
k.gcond_GK_rep$estimate2
## [1] 0.6272109

Emma

  • Overall chi-square is significant with a smaller effect size than Darrel or Gerald.
  • Gettier versus Ignorance shows the same pattern of effects with approximately half of the effect size.
  • Gettier versus Knowledge shows the same pattern of effects with approximately half of the effect size.
View full results
# Overall condition x knowledge-response chi-square for Emma only.
emma_only <- chisq.test(emma_DF$cond, emma_DF$know_vas_combined)
# Cramer's V with 95% CI via MOTE; n comes from the observed table
# (rows with missing responses are dropped, hence n = 2228 < nrow(emma_DF)).
MOTE_emma <- v.chi.sq(
  x2 = emma_only$statistic,
  n = sum(emma_only$observed),
  r = 2,
  c = 3,
  a = .05
)

emma_only
## 
##  Pearson's Chi-squared test
## 
## data:  emma_DF$cond and emma_DF$know_vas_combined
## X-squared = 110.04, df = 2, p-value < 2.2e-16
# print the effect size (Cramer's V) with its confidence interval
MOTE_emma
## $v
## X-squared 
## 0.2222373 
## 
## $vlow
## [1] 0.182073
## 
## $vhigh
## [1] 0.264534
## 
## $n
## [1] 2228
## 
## $df
## [1] 2
## 
## $x2
## X-squared 
##  110.0397 
## 
## $p
##    X-squared 
## 1.274062e-24 
## 
## $estimate
## [1] "$V$ = .22, 95\\% CI [.18, .26]"
## 
## $statistic
## [1] "$\\chi^2$(2) = 110.04, $p$ < .001"
# Emma contingency table: response code (rows 1/2) by condition (columns).
k.e_table_rep <- table(emma_DF$know_vas_combined, emma_DF$cond)
k.e_table_rep
##    
##     Gettier Ignorance Knowledge
##   1     554       664       447
##   2     164       120       279
# Gettier vs. Ignorance: 2-sample test of proportions on the Emma table,
# rows flipped (2:1) and transposed so conditions form the rows.
k.econd_GI_rep <- tidy(prop.test(t(k.e_table_rep[2:1, 1:2])))
k.econd_GI_rep
## # A tibble: 1 × 9
##   estimate1 estimate2 statistic  p.value parame…¹ conf.…² conf.…³ method alter…⁴
##       <dbl>     <dbl>     <dbl>    <dbl>    <dbl>   <dbl>   <dbl> <chr>  <chr>  
## 1     0.228     0.153      13.4 0.000253        1  0.0343   0.116 2-sam… two.si…
## # … with abbreviated variable names ¹​parameter, ²​conf.low, ³​conf.high,
## #   ⁴​alternative
# Cramer's V (with CI) for Emma's Gettier vs. Ignorance 2x2 comparison.
e_gi_2x2 <- t(k.e_table_rep[2:1, 1:2])
k.econd_GI_v_rep <- v.chi.sq(x2 = prop.test(e_gi_2x2)$statistic,
                             n = sum(e_gi_2x2),
                             r = 2, c = 2)
k.econd_GI_v_rep
## $v
##  X-squared 
## 0.09442094 
## 
## $vlow
## [1] 0.05087707
## 
## $vhigh
## [1] 0.1472713
## 
## $n
## [1] 1502
## 
## $df
## [1] 1
## 
## $x2
## X-squared 
##   13.3908 
## 
## $p
##    X-squared 
## 0.0002528613 
## 
## $estimate
## [1] "$V$ = .09, 95\\% CI [.05, .15]"
## 
## $statistic
## [1] "$\\chi^2$(1) = 13.39, $p$ < .001"
# Gettier vs. Knowledge: 2-sample test of proportions on the Emma table,
# using columns 1 (Gettier) and 3 (Knowledge).
k.econd_GK_rep <- tidy(prop.test(t(k.e_table_rep[2:1, c(1, 3)])))
k.econd_GK_rep
## # A tibble: 1 × 9
##   estimate1 estimate2 statistic  p.value parame…¹ conf.…² conf.…³ method alter…⁴
##       <dbl>     <dbl>     <dbl>    <dbl>    <dbl>   <dbl>   <dbl> <chr>  <chr>  
## 1     0.228     0.384      40.5 1.95e-10        1  -0.204  -0.108 2-sam… two.si…
## # … with abbreviated variable names ¹​parameter, ²​conf.low, ³​conf.high,
## #   ⁴​alternative
# Cramer's V (with CI) for Emma's Gettier vs. Knowledge 2x2 comparison.
e_gk_2x2 <- t(k.e_table_rep[2:1, c(1, 3)])
k.econd_GK_v_rep <- v.chi.sq(x2 = prop.test(e_gk_2x2)$statistic,
                             n = sum(e_gk_2x2),
                             r = 2, c = 2)
k.econd_GK_v_rep
## $v
## X-squared 
##   0.16751 
## 
## $vlow
## [1] 0.1188812
## 
## $vhigh
## [1] 0.2206628
## 
## $n
## [1] 1444
## 
## $df
## [1] 1
## 
## $x2
## X-squared 
##  40.51804 
## 
## $p
##         X-squared 
## 0.000000000194809 
## 
## $estimate
## [1] "$V$ = .17, 95\\% CI [.12, .22]"
## 
## $statistic
## [1] "$\\chi^2$(1) = 40.52, $p$ < .001"
# Condition-level proportions from the two Emma pairwise tests
# (estimate1 = Gettier in both comparisons).
k.econd_GI_rep$estimate1
## [1] 0.2284123
k.econd_GI_rep$estimate2
## [1] 0.1530612
k.econd_GK_rep$estimate1
## [1] 0.2284123
k.econd_GK_rep$estimate2
## [1] 0.3842975

Update Table 6/Figure 3

# Counts for Table 6 / Figure 3: Believes/Knows frequencies by condition and
# vignette, computed from the model frame of the pre-registered knowledge x
# vignette model (log_exclude[[7]]).
log_exclude[[7]]@frame %>%
  mutate(
    know_vas_combined = factor(know_vas_combined,
                               levels = c(0, 1),
                               labels = c("Believes", "Knows"))
  ) %>%
  group_by(know_vas_combined, cond, vignette) %>%
  # .groups = "drop_last" is summarize()'s default here, made explicit
  summarize(frequency = n(), .groups = "drop_last") %>%
  # desc(cond) orders the spread columns Knowledge, Ignorance, Gettier
  arrange(desc(cond)) %>%
  pivot_wider(
    id_cols = c(know_vas_combined, vignette),
    names_from = cond,
    values_from = frequency
  ) %>%
  ungroup()
## # A tibble: 6 × 5
##   know_vas_combined vignette Knowledge Ignorance Gettier
##   <fct>             <fct>        <int>     <int>   <int>
## 1 Believes          Darrel         454      1170     615
## 2 Believes          Emma           993      1408    1266
## 3 Believes          Gerald         558      1255     757
## 4 Knows             Darrel        1126       353     923
## 5 Knows             Emma           531       164     276
## 6 Knows             Gerald         957       321     768

Just a Midpoint Split

  • The first results presented in each section represent the midpoint split, while the second results indicate the information presented in the manuscript.
# data prep: apply only the total-exclusion flag (no comprehension screen)
exclude_DF <- full_long %>% 
  filter(total_exclusion < 1)

# knowledge recode: simple midpoint split of the VAS response.
# Order matters: values <= 50 become 2 first; a value of exactly 50 is
# therefore coded 2, and the second rule only catches values > 50.
exclude_DF$know_vas_binned <- exclude_DF$know_vas
exclude_DF$know_vas_binned[exclude_DF$know_vas_binned <= 50] <- 2
exclude_DF$know_vas_binned[exclude_DF$know_vas_binned >= 50] <- 1
# fall back to the binary item when the VAS response is missing
exclude_DF$know_vas_combined <- ifelse(is.na(exclude_DF$know_vas_binned), 
                                     exclude_DF$know_bin, 
                                     exclude_DF$know_vas_binned)

# flip the 1/2 coding (1 <-> 2)
exclude_DF$know_vas_combined <- 3 - exclude_DF$know_vas_combined

# reason recode: same midpoint split and fallback as the knowledge item
exclude_DF$reason_vas_binned <- exclude_DF$reason_vas
exclude_DF$reason_vas_binned[exclude_DF$reason_vas_binned <= 50] <- 2
exclude_DF$reason_vas_binned[exclude_DF$reason_vas_binned >= 50] <- 1
exclude_DF$reason_vas_combined <- ifelse(is.na(exclude_DF$reason_vas_binned), 
                                     exclude_DF$reason_bin, 
                                     exclude_DF$reason_vas_binned)

exclude_DF$reason_vas_combined <- 3 - exclude_DF$reason_vas_combined

# luck recode: same midpoint split and fallback as the knowledge item
exclude_DF$luck_vas_binned <- exclude_DF$luck_vas
exclude_DF$luck_vas_binned[exclude_DF$luck_vas_binned <= 50] <- 2
exclude_DF$luck_vas_binned[exclude_DF$luck_vas_binned >= 50] <- 1
exclude_DF$luck_vas_combined <- ifelse(is.na(exclude_DF$luck_vas_binned), 
                                     exclude_DF$luck_bin, 
                                     exclude_DF$luck_vas_binned)

exclude_DF$luck_vas_combined <- 3 - exclude_DF$luck_vas_combined

# for luck analyses people should be excluded if they get the answer wrong;
# correctness depends on condition: "Wrong" is correct in Ignorance,
# "Right" is correct in the other conditions. Rows with NA cond or ri_wr
# stay FALSE and are therefore excluded.
exclude_DF$luck_correct <- FALSE

exclude_DF$ri_wr <- factor(exclude_DF$ri_wr, 
                           levels = c(1,2),
                           labels = c("Right", "Wrong"))
exclude_DF$luck_correct[exclude_DF$cond == "Ignorance" & exclude_DF$ri_wr == "Wrong"] <- TRUE
exclude_DF$luck_correct[exclude_DF$cond != "Ignorance" & exclude_DF$ri_wr == "Right"] <- TRUE

table(exclude_DF$luck_correct)
## 
## FALSE  TRUE 
##   952 13526
# fix other variables: two-level gender factor; other values become NA
exclude_DF$gender2 <- factor(exclude_DF$gender,
                             levels = c("female", "male"))

# keep only the rows flagged as answering the luck check correctly
# (i.e., drop the wrong answers)
exclude_luck <- subset(exclude_DF, luck_correct)

Knowledge

Condition

  • Results from coefficients are within a confidence interval of each other.
View full results
# Shift the 1/2 knowledge codes down to 0/1 for the binomial glmer below.
# NOTE(review): this in-place subtraction is not idempotent -- re-running
# this chunk without re-running the data prep above corrupts the codes.
exclude_DF$know_vas_combined <- exclude_DF$know_vas_combined - 1
# Knowledge ~ condition model under the midpoint-split coding, with the
# same covariates and nested random-intercept structure as the
# pre-registered model (compare summary(log_exclude[[6]]) below).
k.cond.exclude <- glmer(
  know_vas_combined ~ (1 | vignette/id/person_code) +
    comp + age + gender2 + education + cond,
  family = binomial,
  data = exclude_DF,
  nAGQ = 0,
  control = glmerControl(optimizer = "bobyqa")
)

summary(k.cond.exclude)
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: know_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond
##    Data: exclude_DF
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  16394.9  16470.6  -8187.5  16374.9    14232 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -1.7439 -0.6376 -0.4957  0.7857  3.7587 
## 
## Random effects:
##  Groups                    Name        Variance  Std.Dev. 
##  person_code:(id:vignette) (Intercept) 8.481e-79 9.209e-40
##  id:vignette               (Intercept) 2.500e-13 5.000e-07
##  vignette                  (Intercept) 4.215e-01 6.492e-01
## Number of obs: 14242, groups:  
## person_code:(id:vignette), 14242; id:vignette, 14242; vignette, 3
## 
## Fixed effects:
##                Estimate Std. Error z value Pr(>|z|)    
## (Intercept)   -0.199080   0.392708  -0.507   0.6122    
## compYes        0.021265   0.043830   0.485   0.6276    
## age            0.003641   0.001969   1.850   0.0643 .  
## gender2male   -0.090075   0.041913  -2.149   0.0316 *  
## education     -0.016465   0.007417  -2.220   0.0264 *  
## condIgnorance -1.272714   0.049373 -25.778   <2e-16 ***
## condKnowledge  0.609300   0.043673  13.952   <2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn
## compYes     -0.075                                   
## age         -0.108  0.193                            
## gender2male -0.029  0.014 -0.090                     
## education   -0.238 -0.119 -0.115  0.032              
## condIgnornc -0.049  0.001 -0.007  0.012  0.006       
## condKnowldg -0.055  0.002  0.003 -0.005 -0.005  0.431
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# Pre-registered knowledge ~ condition model, for comparison with
# the midpoint-split model above.
summary(log_exclude[[6]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: know_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond
##    Data: final_long
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  15872.0  15947.4  -7926.0  15852.0    13885 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -1.7728 -0.6220 -0.4902  0.7726  3.8696 
## 
## Random effects:
##  Groups                    Name        Variance Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.0000   0.0000  
##  id:vignette               (Intercept) 0.0000   0.0000  
##  vignette                  (Intercept) 0.4474   0.6689  
## Number of obs: 13895, groups:  
## person_code:(id:vignette), 13895; id:vignette, 13895; vignette, 3
## 
## Fixed effects:
##                Estimate Std. Error z value Pr(>|z|)    
## (Intercept)   -0.183810   0.404093  -0.455   0.6492    
## compYes        0.019364   0.044633   0.434   0.6644    
## age            0.003733   0.001997   1.870   0.0615 .  
## gender2male   -0.084539   0.042604  -1.984   0.0472 *  
## education     -0.017069   0.007528  -2.267   0.0234 *  
## condIgnorance -1.313222   0.050238 -26.140   <2e-16 ***
## condKnowledge  0.611665   0.044435  13.765   <2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn
## compYes     -0.075                                   
## age         -0.107  0.197                            
## gender2male -0.030  0.014 -0.090                     
## education   -0.235 -0.118 -0.115  0.035              
## condIgnornc -0.049  0.001 -0.007  0.011  0.008       
## condKnowldg -0.054  0.001  0.002 -0.006 -0.005  0.431
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')

Vignette

  • Results from coefficients are within a confidence interval of each other.
View full results
# Knowledge model with the condition x vignette interaction under the
# midpoint-split coding; covariates and random intercepts match the
# pre-registered model (compare summary(log_exclude[[7]]) below).
k.vignette.exclude <- glmer(
  know_vas_combined ~ (1 | vignette/id/person_code) +
    comp + age + gender2 + education + cond * vignette,
  family = binomial,
  data = exclude_DF,
  nAGQ = 0,
  control = glmerControl(optimizer = "bobyqa")
)

summary(k.vignette.exclude)
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: know_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond * vignette
##    Data: exclude_DF
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  16329.3  16450.3  -8148.6  16297.3    14226 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -1.7502 -0.5857 -0.4527  0.7933  3.0576 
## 
## Random effects:
##  Groups                    Name        Variance  Std.Dev.          
##  person_code:(id:vignette) (Intercept) 2.500e-13 0.0000005000000000
##  id:vignette               (Intercept) 6.091e-11 0.0000078046824951
##  vignette                  (Intercept) 2.108e-26 0.0000000000001452
## Number of obs: 14242, groups:  
## person_code:(id:vignette), 14242; id:vignette, 14242; vignette, 3
## 
## Fixed effects:
##                               Estimate Std. Error z value           Pr(>|z|)
## (Intercept)                   0.523405   0.124250   4.213 0.0000252558666502
## compYes                       0.018982   0.044031   0.431           0.666390
## age                           0.003712   0.001975   1.879           0.060239
## gender2male                  -0.093158   0.042094  -2.213           0.026891
## education                    -0.015929   0.007448  -2.139           0.032465
## condIgnorance                -1.567344   0.078867 -19.873            < 2e-16
## condKnowledge                 0.498677   0.074927   6.656 0.0000000000282329
## vignetteEmma                 -1.889345   0.082955 -22.776            < 2e-16
## vignetteGerald               -0.388073   0.071771  -5.407 0.0000000640507748
## condIgnorance:vignetteEmma    0.969825   0.130275   7.444 0.0000000000000974
## condKnowledge:vignetteEmma    0.400922   0.112501   3.564           0.000366
## condIgnorance:vignetteGerald  0.229634   0.111994   2.050           0.040325
## condKnowledge:vignetteGerald  0.022343   0.104374   0.214           0.830495
##                                 
## (Intercept)                  ***
## compYes                         
## age                          .  
## gender2male                  *  
## education                    *  
## condIgnorance                ***
## condKnowledge                ***
## vignetteEmma                 ***
## vignetteGerald               ***
## condIgnorance:vignetteEmma   ***
## condKnowledge:vignetteEmma   ***
## condIgnorance:vignetteGerald *  
## condKnowledge:vignetteGerald    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# Pre-registered knowledge ~ condition x vignette model, for comparison
# with the midpoint-split model above.
summary(log_exclude[[7]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: know_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond * vignette
##    Data: final_long
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  15807.7  15928.3  -7887.8  15775.7    13879 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -1.7804 -0.5717 -0.4484  0.7819  3.1287 
## 
## Random effects:
##  Groups                    Name        Variance           Std.Dev.    
##  person_code:(id:vignette) (Intercept) 0.0000000000172891 0.0000041580
##  id:vignette               (Intercept) 0.0000000000007192 0.0000008481
##  vignette                  (Intercept) 0.0000000000000000 0.0000000000
## Number of obs: 13895, groups:  
## person_code:(id:vignette), 13895; id:vignette, 13895; vignette, 3
## 
## Fixed effects:
##                               Estimate Std. Error z value          Pr(>|z|)    
## (Intercept)                   0.555020   0.126409   4.391 0.000011300772713 ***
## compYes                       0.016780   0.044844   0.374          0.708268    
## age                           0.003778   0.002004   1.886          0.059360 .  
## gender2male                  -0.087674   0.042794  -2.049          0.040489 *  
## education                    -0.016600   0.007561  -2.196          0.028122 *  
## condIgnorance                -1.604292   0.080023 -20.048           < 2e-16 ***
## condKnowledge                 0.503172   0.076198   6.603 0.000000000040161 ***
## vignetteEmma                 -1.930434   0.084439 -22.862           < 2e-16 ***
## vignetteGerald               -0.390320   0.073064  -5.342 0.000000091833949 ***
## condIgnorance:vignetteEmma    0.977499   0.132791   7.361 0.000000000000182 ***
## condKnowledge:vignetteEmma    0.395026   0.114528   3.449          0.000562 ***
## condIgnorance:vignetteGerald  0.223192   0.113776   1.962          0.049800 *  
## condKnowledge:vignetteGerald  0.021780   0.106168   0.205          0.837455    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')

Turk

  • Results from coefficients are within a confidence interval of each other.
View full results
# Knowledge model with the condition x MTurk-sample interaction under the
# midpoint-split coding; covariates and random intercepts match the
# pre-registered model (compare summary(log_exclude[[8]]) below).
k.turk.exclude <- glmer(
  know_vas_combined ~ (1 | vignette/id/person_code) +
    comp + age + gender2 + education + cond * turk,
  family = binomial,
  data = exclude_DF,
  nAGQ = 0,
  control = glmerControl(optimizer = "bobyqa")
)

summary(k.turk.exclude)
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: know_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond * turk
##    Data: exclude_DF
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  16374.2  16472.5  -8174.1  16348.2    14229 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -2.0596 -0.6286 -0.4915  0.7954  3.8176 
## 
## Random effects:
##  Groups                    Name        Variance Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.0000   0.0000  
##  id:vignette               (Intercept) 0.0000   0.0000  
##  vignette                  (Intercept) 0.4234   0.6507  
## Number of obs: 14242, groups:  
## person_code:(id:vignette), 14242; id:vignette, 14242; vignette, 3
## 
## Fixed effects:
##                          Estimate Std. Error z value Pr(>|z|)    
## (Intercept)            -0.0903101  0.3944471  -0.229  0.81890    
## compYes                -0.0310858  0.0455190  -0.683  0.49466    
## age                    -0.0002424  0.0021899  -0.111  0.91185    
## gender2male            -0.1159354  0.0424451  -2.731  0.00631 ** 
## education              -0.0157819  0.0074212  -2.127  0.03345 *  
## condIgnorance          -1.2512205  0.0513191 -24.381  < 2e-16 ***
## condKnowledge           0.5906307  0.0453756  13.016  < 2e-16 ***
## turkTRUE                0.3158457  0.1224020   2.580  0.00987 ** 
## condIgnorance:turkTRUE -0.2909027  0.1875978  -1.551  0.12098    
## condKnowledge:turkTRUE  0.2795678  0.1689012   1.655  0.09788 .  
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn cndKnw trTRUE cI:TRU
## compYes     -0.090                                                        
## age         -0.125  0.285                                                 
## gender2male -0.038  0.053 -0.021                                          
## education   -0.236 -0.120 -0.113  0.029                                   
## condIgnornc -0.052  0.005  0.000  0.012  0.006                            
## condKnowldg -0.056 -0.001 -0.002 -0.006 -0.005  0.434                     
## turkTRUE     0.031 -0.184 -0.286 -0.098  0.014  0.161  0.186              
## cndIgn:TRUE  0.014 -0.001  0.001  0.005 -0.001 -0.271 -0.121 -0.586       
## cndKnw:TRUE  0.015  0.000  0.002  0.001  0.000 -0.119 -0.266 -0.650  0.424
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# Pre-registered knowledge ~ condition x turk model, for comparison with
# the midpoint-split model above.
summary(log_exclude[[8]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: know_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond * turk
##    Data: final_long
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  15850.2  15948.2  -7912.1  15824.2    13882 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -2.1117 -0.6244 -0.4852  0.7822  3.9470 
## 
## Random effects:
##  Groups                    Name        Variance           Std.Dev.    
##  person_code:(id:vignette) (Intercept) 0.0000000000002108 0.0000004591
##  id:vignette               (Intercept) 0.0000000000024070 0.0000015515
##  vignette                  (Intercept) 0.4494139896495767 0.6703834646
## Number of obs: 13895, groups:  
## person_code:(id:vignette), 13895; id:vignette, 13895; vignette, 3
## 
## Fixed effects:
##                          Estimate Std. Error z value Pr(>|z|)    
## (Intercept)            -0.0743861  0.4058531  -0.183  0.85458    
## compYes                -0.0330383  0.0463320  -0.713  0.47580    
## age                    -0.0001138  0.0022169  -0.051  0.95906    
## gender2male            -0.1113136  0.0431663  -2.579  0.00992 ** 
## education              -0.0164575  0.0075305  -2.185  0.02886 *  
## condIgnorance          -1.2894721  0.0521660 -24.719  < 2e-16 ***
## condKnowledge           0.5914056  0.0461318  12.820  < 2e-16 ***
## turkTRUE                0.3226120  0.1260957   2.558  0.01051 *  
## condIgnorance:turkTRUE -0.3324922  0.1930846  -1.722  0.08507 .  
## condKnowledge:turkTRUE  0.2986885  0.1735316   1.721  0.08521 .  
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn cndKnw trTRUE cI:TRU
## compYes     -0.090                                                        
## age         -0.124  0.288                                                 
## gender2male -0.039  0.053 -0.021                                          
## education   -0.233 -0.119 -0.112  0.032                                   
## condIgnornc -0.052  0.006  0.001  0.011  0.007                            
## condKnowldg -0.055 -0.001 -0.002 -0.006 -0.005  0.433                     
## turkTRUE     0.031 -0.182 -0.282 -0.098  0.011  0.158  0.184              
## cndIgn:TRUE  0.013 -0.001  0.001  0.007  0.001 -0.268 -0.119 -0.588       
## cndKnw:TRUE  0.014  0.000  0.002 -0.001  0.002 -0.118 -0.263 -0.654  0.427
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')

Reasonable

Condition

  • The coefficients from the two exclusion-criteria models fall within each other's confidence intervals.
View full results
# Shift the dichotomized "reason" outcome down by one so it is 0/1 coded for
# the binomial family (assumes it was stored as 1/2 — verify upstream coding)
exclude_DF$reason_vas_combined <- exclude_DF$reason_vas_combined - 1

# Reason ~ condition model on the broadened-exclusion data: covariates plus
# nested random intercepts (person within id within vignette)
r.cond.exclude <- glmer(
  reason_vas_combined ~ (1 | vignette/id/person_code) +
    comp + age + gender2 + education + cond,
  data = exclude_DF,
  family = binomial,
  nAGQ = 0,
  control = glmerControl(optimizer = "bobyqa")
)

summary(r.cond.exclude)
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: reason_vas_combined ~ (1 | vignette/id/person_code) + comp +  
##     age + gender2 + education + cond
##    Data: exclude_DF
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##   7618.2   7693.8  -3799.1   7598.2    14238 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -6.1506  0.2213  0.2683  0.3204  0.5695 
## 
## Random effects:
##  Groups                    Name        Variance          Std.Dev.   
##  person_code:(id:vignette) (Intercept) 0.006747443567930 0.082142824
##  id:vignette               (Intercept) 0.000000000007386 0.000002718
##  vignette                  (Intercept) 0.065531753404023 0.255991706
## Number of obs: 14248, groups:  
## person_code:(id:vignette), 14248; id:vignette, 14248; vignette, 3
## 
## Fixed effects:
##                Estimate Std. Error z value           Pr(>|z|)    
## (Intercept)    1.784498   0.236073   7.559 0.0000000000000406 ***
## compYes        0.231500   0.069489   3.331           0.000864 ***
## age           -0.003719   0.003117  -1.193           0.232864    
## gender2male   -0.198665   0.067234  -2.955           0.003128 ** 
## education      0.052064   0.011527   4.516 0.0000062871224868 ***
## condIgnorance -0.346888   0.072294  -4.798 0.0000016002058570 ***
## condKnowledge  0.449632   0.085877   5.236 0.0000001642958072 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn
## compYes     -0.203                                   
## age         -0.301  0.200                            
## gender2male -0.086  0.014 -0.093                     
## education   -0.611 -0.100 -0.097  0.030              
## condIgnornc -0.174 -0.005  0.001  0.001 -0.001       
## condKnowldg -0.151  0.001  0.001 -0.002  0.006  0.480
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
summary(log_exclude[[14]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: reason_vas_combined ~ (1 | vignette/id/person_code) + comp +  
##     age + gender2 + education + cond
##    Data: final_long
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##   7047.1   7122.6  -3513.6   7027.1    13964 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -6.6491  0.2093  0.2544  0.3073  0.5823 
## 
## Random effects:
##  Groups                    Name        Variance Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.001461 0.03822 
##  id:vignette               (Intercept) 0.007296 0.08542 
##  vignette                  (Intercept) 0.076656 0.27687 
## Number of obs: 13974, groups:  
## person_code:(id:vignette), 13974; id:vignette, 13974; vignette, 3
## 
## Fixed effects:
##                Estimate Std. Error z value          Pr(>|z|)    
## (Intercept)    1.839819   0.248928   7.391 0.000000000000146 ***
## compYes        0.251565   0.072504   3.470          0.000521 ***
## age           -0.004968   0.003209  -1.548          0.121581    
## gender2male   -0.181153   0.070632  -2.565          0.010326 *  
## education      0.057713   0.011926   4.839 0.000001303215506 ***
## condIgnorance -0.400271   0.075940  -5.271 0.000000135772713 ***
## condKnowledge  0.425613   0.090671   4.694 0.000002678579960 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn
## compYes     -0.202                                   
## age         -0.297  0.201                            
## gender2male -0.085  0.014 -0.095                     
## education   -0.599 -0.098 -0.097  0.030              
## condIgnornc -0.178 -0.005  0.002  0.002  0.001       
## condKnowldg -0.154  0.003  0.003 -0.002  0.007  0.487

Vignette

  • Results from coefficients are within a confidence interval of each other.
View full results
# Reason model adding the condition-by-vignette interaction, same covariates
# and nested random-intercept structure as the condition-only model
r.vignette.exclude <- glmer(
  reason_vas_combined ~ (1 | vignette/id/person_code) +
    comp + age + gender2 + education + cond * vignette,
  data = exclude_DF,
  family = binomial,
  nAGQ = 0,
  control = glmerControl(optimizer = "bobyqa")
)

summary(r.vignette.exclude)
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: reason_vas_combined ~ (1 | vignette/id/person_code) + comp +  
##     age + gender2 + education + cond * vignette
##    Data: exclude_DF
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##   7594.4   7715.4  -3781.2   7562.4    14232 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -6.9075  0.2153  0.2680  0.3298  0.5401 
## 
## Random effects:
##  Groups                    Name        Variance    Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.001283669 0.035828
##  id:vignette               (Intercept) 0.000001523 0.001234
##  vignette                  (Intercept) 0.000000000 0.000000
## Number of obs: 14248, groups:  
## person_code:(id:vignette), 14248; id:vignette, 14248; vignette, 3
## 
## Fixed effects:
##                               Estimate Std. Error z value          Pr(>|z|)    
## (Intercept)                   2.307115   0.211421  10.912           < 2e-16 ***
## compYes                       0.228689   0.069547   3.288           0.00101 ** 
## age                          -0.003725   0.003127  -1.191           0.23355    
## gender2male                  -0.199997   0.067279  -2.973           0.00295 ** 
## education                     0.052870   0.011533   4.584 0.000004560255809 ***
## condIgnorance                -0.769447   0.148472  -5.182 0.000000219011965 ***
## condKnowledge                 0.449781   0.189475   2.374           0.01760 *  
## vignetteEmma                 -1.025109   0.143274  -7.155 0.000000000000837 ***
## vignetteGerald               -0.423609   0.156103  -2.714           0.00665 ** 
## condIgnorance:vignetteEmma    0.726863   0.184537   3.939 0.000081875177357 ***
## condKnowledge:vignetteEmma    0.231065   0.229668   1.006           0.31438    
## condIgnorance:vignetteGerald  0.337224   0.197176   1.710           0.08722 .  
## condKnowledge:vignetteGerald -0.304036   0.239462  -1.270           0.20421    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
summary(log_exclude[[15]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: reason_vas_combined ~ (1 | vignette/id/person_code) + comp +  
##     age + gender2 + education + cond * vignette
##    Data: final_long
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##   7025.8   7146.5  -3496.9   6993.8    13958 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -7.4722  0.2020  0.2562  0.3173  0.5561 
## 
## Random effects:
##  Groups                    Name        Variance Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.000000 0.00000 
##  id:vignette               (Intercept) 0.002124 0.04609 
##  vignette                  (Intercept) 0.000000 0.00000 
## Number of obs: 13974, groups:  
## person_code:(id:vignette), 13974; id:vignette, 13974; vignette, 3
## 
## Fixed effects:
##                               Estimate Std. Error z value        Pr(>|z|)    
## (Intercept)                   2.425110   0.222940  10.878         < 2e-16 ***
## compYes                       0.248903   0.072555   3.431        0.000602 ***
## age                          -0.005006   0.003218  -1.556        0.119764    
## gender2male                  -0.181886   0.070666  -2.574        0.010056 *  
## education                     0.058594   0.011936   4.909 0.0000009160223 ***
## condIgnorance                -0.863705   0.160582  -5.379 0.0000000750731 ***
## condKnowledge                 0.390875   0.204642   1.910        0.056127 .  
## vignetteEmma                 -1.099565   0.155615  -7.066 0.0000000000016 ***
## vignetteGerald               -0.522306   0.168276  -3.104        0.001910 ** 
## condIgnorance:vignetteEmma    0.741195   0.196886   3.765        0.000167 ***
## condKnowledge:vignetteEmma    0.256617   0.245839   1.044        0.296559    
## condIgnorance:vignetteGerald  0.424379   0.210062   2.020        0.043357 *  
## condKnowledge:vignetteGerald -0.237181   0.255828  -0.927        0.353869    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')

Turk

  • Results from coefficients are within a confidence interval of each other.
View full results
# Reason model adding the condition-by-turk (online sample) interaction,
# same covariates and nested random-intercept structure as above
r.turk.exclude <- glmer(
  reason_vas_combined ~ (1 | vignette/id/person_code) +
    comp + age + gender2 + education + cond * turk,
  data = exclude_DF,
  family = binomial,
  nAGQ = 0,
  control = glmerControl(optimizer = "bobyqa")
)

summary(r.turk.exclude)
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: reason_vas_combined ~ (1 | vignette/id/person_code) + comp +  
##     age + gender2 + education + cond * turk
##    Data: exclude_DF
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##   7595.4   7693.8  -3784.7   7569.4    14235 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -7.1773  0.2174  0.2682  0.3211  0.6700 
## 
## Random effects:
##  Groups                    Name        Variance  Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.0017023 0.04126 
##  id:vignette               (Intercept) 0.0002381 0.01543 
##  vignette                  (Intercept) 0.0656375 0.25620 
## Number of obs: 14248, groups:  
## person_code:(id:vignette), 14248; id:vignette, 14248; vignette, 3
## 
## Fixed effects:
##                         Estimate Std. Error z value     Pr(>|z|)    
## (Intercept)             1.984053   0.238726   8.311      < 2e-16 ***
## compYes                 0.128874   0.072531   1.777     0.075599 .  
## age                    -0.010321   0.003297  -3.131     0.001744 ** 
## gender2male            -0.243720   0.067707  -3.600     0.000319 ***
## education               0.051489   0.011357   4.533 0.0000058015 ***
## condIgnorance          -0.324940   0.073919  -4.396 0.0000110332 ***
## condKnowledge           0.468246   0.087756   5.336 0.0000000951 ***
## turkTRUE                1.163850   0.317981   3.660     0.000252 ***
## condIgnorance:turkTRUE -0.559833   0.375913  -1.489     0.136418    
## condKnowledge:turkTRUE -0.468951   0.442526  -1.060     0.289274    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn cndKnw trTRUE cI:TRU
## compYes     -0.252                                                        
## age         -0.343  0.310                                                 
## gender2male -0.111  0.053 -0.035                                          
## education   -0.596 -0.096 -0.089  0.034                                   
## condIgnornc -0.175 -0.003  0.004  0.003  0.000                            
## condKnowldg -0.151  0.001  0.000 -0.003  0.006  0.475                     
## turkTRUE     0.050 -0.124 -0.173 -0.057 -0.003  0.131  0.111              
## cndIgn:TRUE  0.035  0.001  0.000 -0.002  0.000 -0.197 -0.093 -0.813       
## cndKnw:TRUE  0.030  0.000  0.001  0.001 -0.002 -0.094 -0.198 -0.691  0.584
summary(log_exclude[[16]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: reason_vas_combined ~ (1 | vignette/id/person_code) + comp +  
##     age + gender2 + education + cond * turk
##    Data: final_long
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##   7017.4   7115.5  -3495.7   6991.4    13961 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -8.5047  0.2054  0.2546  0.3087  0.6997 
## 
## Random effects:
##  Groups                    Name        Variance     Std.Dev. 
##  person_code:(id:vignette) (Intercept) 0.0023770476 0.0487550
##  id:vignette               (Intercept) 0.0000001472 0.0003837
##  vignette                  (Intercept) 0.0768855931 0.2772825
## Number of obs: 13974, groups:  
## person_code:(id:vignette), 13974; id:vignette, 13974; vignette, 3
## 
## Fixed effects:
##                         Estimate Std. Error z value   Pr(>|z|)    
## (Intercept)             2.073866   0.251554   8.244    < 2e-16 ***
## compYes                 0.131776   0.075708   1.741   0.081757 .  
## age                    -0.012466   0.003376  -3.692   0.000222 ***
## gender2male            -0.232380   0.071089  -3.269   0.001080 ** 
## education               0.056843   0.011725   4.848 0.00000125 ***
## condIgnorance          -0.378343   0.077451  -4.885 0.00000103 ***
## condKnowledge           0.440103   0.092314   4.767 0.00000187 ***
## turkTRUE                1.385038   0.368873   3.755   0.000173 ***
## condIgnorance:turkTRUE -0.655384   0.427927  -1.532   0.125638    
## condKnowledge:turkTRUE -0.443704   0.514800  -0.862   0.388745    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn cndKnw trTRUE cI:TRU
## compYes     -0.251                                                        
## age         -0.338  0.315                                                 
## gender2male -0.109  0.053 -0.039                                          
## education   -0.583 -0.094 -0.088  0.035                                   
## condIgnornc -0.180 -0.003  0.005  0.004  0.001                            
## condKnowldg -0.154  0.002  0.001 -0.003  0.007  0.484                     
## turkTRUE     0.044 -0.113 -0.156 -0.050 -0.003  0.120  0.101              
## cndIgn:TRUE  0.032  0.001  0.001 -0.002 -0.001 -0.181 -0.088 -0.835       
## cndKnw:TRUE  0.027  0.000  0.001  0.001 -0.002 -0.087 -0.179 -0.694  0.598

Luck

Condition

  • Results from coefficients are within a confidence interval of each other.
View full results
# Shift the dichotomized "luck" outcome down by one so it is 0/1 coded for
# the binomial family (assumes it was stored as 1/2 — verify upstream coding)
exclude_luck$luck_vas_combined <- exclude_luck$luck_vas_combined - 1

# Luck ~ condition model on the broadened-exclusion data: covariates plus
# nested random intercepts (person within id within vignette)
l.cond.exclude <- glmer(
  luck_vas_combined ~ (1 | vignette/id/person_code) +
    comp + age + gender2 + education + cond,
  data = exclude_luck,
  family = binomial,
  nAGQ = 0,
  control = glmerControl(optimizer = "bobyqa")
)

summary(l.cond.exclude)
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: luck_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond
##    Data: exclude_luck
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  16878.5  16953.5  -8429.3  16858.5    13295 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -2.1704 -0.9144  0.4729  0.8213  2.1331 
## 
## Random effects:
##  Groups                    Name        Variance  Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.0008215 0.02866 
##  id:vignette               (Intercept) 0.0099704 0.09985 
##  vignette                  (Intercept) 0.3395371 0.58270 
## Number of obs: 13305, groups:  
## person_code:(id:vignette), 13305; id:vignette, 13305; vignette, 3
## 
## Fixed effects:
##                Estimate Std. Error z value  Pr(>|z|)    
## (Intercept)    0.222316   0.355676   0.625    0.5319    
## compYes        0.169379   0.042610   3.975 0.0000703 ***
## age           -0.004508   0.001915  -2.355    0.0185 *  
## gender2male   -0.028417   0.040747  -0.697    0.4855    
## education      0.031464   0.007282   4.321 0.0000155 ***
## condIgnorance -0.984890   0.045897 -21.459   < 2e-16 ***
## condKnowledge -0.894174   0.045646 -19.589   < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn
## compYes     -0.080                                   
## age         -0.117  0.186                            
## gender2male -0.030  0.013 -0.089                     
## education   -0.259 -0.114 -0.110  0.025              
## condIgnornc -0.065 -0.009  0.016  0.005 -0.013       
## condKnowldg -0.063 -0.012  0.009  0.003 -0.015  0.525
summary(log_exclude[[22]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: luck_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond
##    Data: final_luck
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  15896.2  15970.6  -7938.1  15876.2    12630 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -2.1575 -0.8039 -0.4574  0.9076  2.2438 
## 
## Random effects:
##  Groups                    Name        Variance Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.009841 0.09920 
##  id:vignette               (Intercept) 0.004391 0.06626 
##  vignette                  (Intercept) 0.373469 0.61112 
## Number of obs: 12640, groups:  
## person_code:(id:vignette), 12640; id:vignette, 12640; vignette, 3
## 
## Fixed effects:
##                Estimate Std. Error z value  Pr(>|z|)    
## (Intercept)   -0.246316   0.372248  -0.662    0.5082    
## compYes       -0.180694   0.043976  -4.109 0.0000398 ***
## age            0.004153   0.001967   2.111    0.0348 *  
## gender2male    0.007267   0.042185   0.172    0.8632    
## education     -0.031096   0.007485  -4.155 0.0000326 ***
## condIgnorance  1.027734   0.047414  21.676   < 2e-16 ***
## condKnowledge  0.935679   0.047106  19.863   < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn
## compYes     -0.079                                   
## age         -0.116  0.191                            
## gender2male -0.029  0.014 -0.086                     
## education   -0.253 -0.116 -0.109  0.023              
## condIgnornc -0.064 -0.007  0.018  0.002 -0.014       
## condKnowldg -0.062 -0.012  0.009 -0.001 -0.017  0.525

Vignette

  • Results from coefficients are within a confidence interval of each other.
View full results
# Luck model adding the condition-by-vignette interaction, same covariates
# and nested random-intercept structure as the condition-only model
l.vignette.exclude <- glmer(
  luck_vas_combined ~ (1 | vignette/id/person_code) +
    comp + age + gender2 + education + cond * vignette,
  data = exclude_luck,
  family = binomial,
  nAGQ = 0,
  control = glmerControl(optimizer = "bobyqa")
)

summary(l.vignette.exclude)
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: luck_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond * vignette
##    Data: exclude_luck
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  16430.9  16550.9  -8199.5  16398.9    13289 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -3.4902 -0.8498  0.2947  1.0165  2.1699 
## 
## Random effects:
##  Groups                    Name        Variance   Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.00234642 0.048440
##  id:vignette               (Intercept) 0.00001009 0.003176
##  vignette                  (Intercept) 0.00000000 0.000000
## Number of obs: 13305, groups:  
## person_code:(id:vignette), 13305; id:vignette, 13305; vignette, 3
## 
## Fixed effects:
##                               Estimate Std. Error z value          Pr(>|z|)    
## (Intercept)                  -0.878536   0.125085  -7.024 0.000000000002163 ***
## compYes                       0.185387   0.043311   4.280 0.000018659951524 ***
## age                          -0.004608   0.001950  -2.363            0.0181 *  
## gender2male                  -0.016309   0.041291  -0.395            0.6929    
## education                     0.031213   0.007401   4.217 0.000024709433765 ***
## condIgnorance                -0.064460   0.076051  -0.848            0.3967    
## condKnowledge                -0.713049   0.080347  -8.875           < 2e-16 ***
## vignetteEmma                  2.720971   0.104934  25.930           < 2e-16 ***
## vignetteGerald                1.016167   0.076374  13.305           < 2e-16 ***
## condIgnorance:vignetteEmma   -2.444410   0.128950 -18.956           < 2e-16 ***
## condKnowledge:vignetteEmma   -0.988465   0.132107  -7.482 0.000000000000073 ***
## condIgnorance:vignetteGerald -0.695187   0.107310  -6.478 0.000000000092751 ***
## condKnowledge:vignetteGerald -0.048157   0.110705  -0.435            0.6636    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
summary(log_exclude[[23]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: luck_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond * vignette
##    Data: final_luck
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  15458.4  15577.5  -7713.2  15426.4    12624 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -2.1894 -1.0118 -0.2784  0.8435  3.7019 
## 
## Random effects:
##  Groups                    Name        Variance  Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.0004629 0.02151 
##  id:vignette               (Intercept) 0.0015660 0.03957 
##  vignette                  (Intercept) 0.0000000 0.00000 
## Number of obs: 12640, groups:  
## person_code:(id:vignette), 12640; id:vignette, 12640; vignette, 3
## 
## Fixed effects:
##                               Estimate Std. Error z value           Pr(>|z|)
## (Intercept)                   0.907304   0.128577   7.056 0.0000000000017077
## compYes                      -0.195308   0.044696  -4.370 0.0000124396016865
## age                           0.004152   0.002002   2.074             0.0381
## gender2male                  -0.004211   0.042725  -0.099             0.9215
## education                    -0.031082   0.007607  -4.086 0.0000439445273061
## condIgnorance                 0.083929   0.078418   1.070             0.2845
## condKnowledge                 0.723553   0.082462   8.774            < 2e-16
## vignetteEmma                 -2.851015   0.110619 -25.773            < 2e-16
## vignetteGerald               -1.070444   0.078819 -13.581            < 2e-16
## condIgnorance:vignetteEmma    2.527798   0.135060  18.716            < 2e-16
## condKnowledge:vignetteEmma    1.067342   0.138234   7.721 0.0000000000000115
## condIgnorance:vignetteGerald  0.723264   0.110503   6.545 0.0000000000594091
## condKnowledge:vignetteGerald  0.094904   0.113794   0.834             0.4043
##                                 
## (Intercept)                  ***
## compYes                      ***
## age                          *  
## gender2male                     
## education                    ***
## condIgnorance                   
## condKnowledge                ***
## vignetteEmma                 ***
## vignetteGerald               ***
## condIgnorance:vignetteEmma   ***
## condKnowledge:vignetteEmma   ***
## condIgnorance:vignetteGerald ***
## condKnowledge:vignetteGerald    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')

Turk

  • Results from coefficients are within a confidence interval of each other. (NOTE, review: the saved output for log_exclude[[24]] below is identical to log_exclude[[16]] — a reason_vas_combined ~ cond * turk model on final_long, same formula, AIC, and coefficients — where a luck_vas_combined ~ cond * turk model on final_luck is expected; confirm the correct model was stored in slot 24.)
View full results
# Luck model adding the condition-by-turk (online sample) interaction,
# same covariates and nested random-intercept structure as above
l.turk.exclude <- glmer(
  luck_vas_combined ~ (1 | vignette/id/person_code) +
    comp + age + gender2 + education + cond * turk,
  data = exclude_luck,
  family = binomial,
  nAGQ = 0,
  control = glmerControl(optimizer = "bobyqa")
)

summary(l.turk.exclude)
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: luck_vas_combined ~ (1 | vignette/id/person_code) + comp + age +  
##     gender2 + education + cond * turk
##    Data: exclude_luck
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##  16860.1  16957.6  -8417.1  16834.1    13292 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -2.3274 -0.9133  0.4662  0.8292  2.2508 
## 
## Random effects:
##  Groups                    Name        Variance  Std.Dev.
##  person_code:(id:vignette) (Intercept) 0.0093557 0.09672 
##  id:vignette               (Intercept) 0.0008057 0.02838 
##  vignette                  (Intercept) 0.3393677 0.58255 
## Number of obs: 13305, groups:  
## person_code:(id:vignette), 13305; id:vignette, 13305; vignette, 3
## 
## Fixed effects:
##                         Estimate Std. Error z value   Pr(>|z|)    
## (Intercept)             0.333303   0.356602   0.935    0.34996    
## compYes                 0.116316   0.044350   2.623    0.00872 ** 
## age                    -0.008610   0.002141  -4.022 0.00005772 ***
## gender2male            -0.052788   0.041196  -1.281    0.20006    
## education               0.032480   0.007294   4.453 0.00000848 ***
## condIgnorance          -1.004532   0.047699 -21.060    < 2e-16 ***
## condKnowledge          -0.884074   0.047447 -18.633    < 2e-16 ***
## turkTRUE                0.311124   0.132378   2.350    0.01876 *  
## condIgnorance:turkTRUE  0.261993   0.175805   1.490    0.13616    
## condKnowledge:turkTRUE -0.143054   0.173024  -0.827    0.40836    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn cndKnw trTRUE cI:TRU
## compYes     -0.096                                                        
## age         -0.136  0.281                                                 
## gender2male -0.039  0.050 -0.018                                          
## education   -0.256 -0.118 -0.113  0.020                                   
## condIgnornc -0.068 -0.006  0.020  0.005 -0.014                            
## condKnowldg -0.066 -0.009  0.012  0.004 -0.015  0.523                     
## turkTRUE     0.026 -0.170 -0.267 -0.086  0.018  0.179  0.182              
## cndIgn:TRUE  0.019  0.001 -0.008  0.004  0.003 -0.268 -0.139 -0.685       
## cndKnw:TRUE  0.018  0.005  0.005  0.000  0.000 -0.140 -0.272 -0.699  0.525
summary(log_exclude[[24]])
## Generalized linear mixed model fit by maximum likelihood (Adaptive
##   Gauss-Hermite Quadrature, nAGQ = 0) [glmerMod]
##  Family: binomial  ( logit )
## Formula: reason_vas_combined ~ (1 | vignette/id/person_code) + comp +  
##     age + gender2 + education + cond * turk
##    Data: final_long
## Control: glmerControl(optimizer = "bobyqa")
## 
##      AIC      BIC   logLik deviance df.resid 
##   7017.4   7115.5  -3495.7   6991.4    13961 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -8.5047  0.2054  0.2546  0.3087  0.6997 
## 
## Random effects:
##  Groups                    Name        Variance     Std.Dev. 
##  person_code:(id:vignette) (Intercept) 0.0023770476 0.0487550
##  id:vignette               (Intercept) 0.0000001472 0.0003837
##  vignette                  (Intercept) 0.0768855931 0.2772825
## Number of obs: 13974, groups:  
## person_code:(id:vignette), 13974; id:vignette, 13974; vignette, 3
## 
## Fixed effects:
##                         Estimate Std. Error z value   Pr(>|z|)    
## (Intercept)             2.073866   0.251554   8.244    < 2e-16 ***
## compYes                 0.131776   0.075708   1.741   0.081757 .  
## age                    -0.012466   0.003376  -3.692   0.000222 ***
## gender2male            -0.232380   0.071089  -3.269   0.001080 ** 
## education               0.056843   0.011725   4.848 0.00000125 ***
## condIgnorance          -0.378343   0.077451  -4.885 0.00000103 ***
## condKnowledge           0.440103   0.092314   4.767 0.00000187 ***
## turkTRUE                1.385038   0.368873   3.755   0.000173 ***
## condIgnorance:turkTRUE -0.655384   0.427927  -1.532   0.125638    
## condKnowledge:turkTRUE -0.443704   0.514800  -0.862   0.388745    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Correlation of Fixed Effects:
##             (Intr) compYs age    gndr2m eductn cndIgn cndKnw trTRUE cI:TRU
## compYes     -0.251                                                        
## age         -0.338  0.315                                                 
## gender2male -0.109  0.053 -0.039                                          
## education   -0.583 -0.094 -0.088  0.035                                   
## condIgnornc -0.180 -0.003  0.005  0.004  0.001                            
## condKnowldg -0.154  0.002  0.001 -0.003  0.007  0.484                     
## turkTRUE     0.044 -0.113 -0.156 -0.050 -0.003  0.120  0.101              
## cndIgn:TRUE  0.032  0.001  0.001 -0.002 -0.001 -0.181 -0.088 -0.835       
## cndKnw:TRUE  0.027  0.000  0.001  0.001 -0.002 -0.087 -0.179 -0.694  0.598

Graph of Area Representation

# Build the base world map with country names recoded to ISO3 codes.
# iso.alpha() warns about names that match more than one country.
# Antarctica (ATA) is removed so it does not dominate the projection.
world_map <- map_data(map = "world") %>%
  mutate(orig_region = region,
         region = iso.alpha(region, n = 3)) %>%
  filter(region != "ATA")

# Keep one row per participant (full_long has repeated rows per id), then
# count participants contributed by each lab country.
# Fix: the original filter(!duplicated(full_long %>% select(id))) re-read the
# whole data frame from inside the pipe — fragile if upstream steps change
# rows — distinct() is the self-contained, idiomatic equivalent (same
# "keep first occurrence" semantics as !duplicated()).
country_summary <- full_long %>% 
      distinct(id, .keep_all = TRUE) %>% 
      count(lab_country, name = "n") %>% 
      filter(!is.na(lab_country))

# Map sample size per country on a continuous grey scale; country outlines
# are drawn on top so borders stay visible against the fill.
cont_country <- ggplot(country_summary) +
  geom_map(aes(map_id = lab_country, fill = n), map = world_map) +
  geom_polygon(data = world_map, 
               aes(x = long, y = lat, group = group), 
               colour = 'black', fill = NA) + 
  theme_void() + 
  scale_fill_distiller(name = "Sample Size",
                       palette = "Greys",
                       direction = 1,
                       na.value = "white") 

cont_country

# Pass the plot object explicitly instead of relying on ggplot2's
# last_plot() default — consistent with how the binned map is saved.
ggsave("figure/continuous_country.png", cont_country)
  
# Bin sample sizes into ordered categories for a discrete grey fill scale.
# FIX: the original nested if_else chain tested n > 1000 and then n < 1000,
# so a country with exactly n == 1000 fell through every branch and was
# mislabeled "< 100". case_when with >= boundaries closes that gap and is
# easier to read than three nested if_else calls.
country_summary$n_binned <- case_when(
  country_summary$n >= 1000 ~ "1000+",
  country_summary$n >= 400 ~ "400-999",
  country_summary$n >= 100 ~ "100-399",
  TRUE ~ "< 100"
)

# Order the bins smallest to largest so the legend and manual palette align.
country_summary$n_binned <- factor(country_summary$n_binned, 
                                   levels = c("< 100", "100-399", 
                                              "400-999", "1000+"))

# Binned sample-size map: manual grey palette runs lightest (smallest bin)
# to darkest (largest bin); country outlines are drawn over the fill.
bin_country <- ggplot(country_summary) +
  geom_map(aes(map_id = lab_country, fill = n_binned), map = world_map) +
  geom_polygon(
    data = world_map,
    aes(x = long, y = lat, group = group),
    colour = 'black', fill = NA
  ) +
  scale_fill_manual(name = "Sample Size",
                    values = c("#c8c8c8", "#969696", "#646464", "#323232")) +
  theme_void()

bin_country

ggsave("figure/binned_country.png", bin_country)

# Treemap of sample sizes grouped by UN geographic subregion.
# Map ISO3 lab-country codes to UN subregion / region names. countrycode
# cannot match TWN (Taiwan is not in the UN region scheme), hence the
# warnings captured below; it is filled in manually afterwards.
country_summary$un_region_sub <- countrycode(
  sourcevar = country_summary$lab_country,
  origin = 'iso3c', 
  destination = 'un.regionsub.name'
)
## Warning in countrycode_convert(sourcevar = sourcevar, origin = origin, destination = dest, : Some values were not matched unambiguously: TWN
country_summary$un_region <- countrycode(
  sourcevar = country_summary$lab_country,
  origin = 'iso3c', 
  destination = 'un.region.name'
)
## Warning in countrycode_convert(sourcevar = sourcevar, origin = origin, destination = dest, : Some values were not matched unambiguously: TWN
# Classify the one unmatched code (TWN) as Asia / Eastern Asia by hand.
country_summary$un_region[is.na(country_summary$un_region)] <- "Asia"
country_summary$un_region_sub[is.na(country_summary$un_region_sub)] <- "Eastern Asia"

# Treemap: tile area proportional to n, fill = binned sample size,
# white borders separate UN subregions; country labels centered in tiles.
tree <- ggplot(country_summary, aes(area = n, fill = n_binned,
               label = lab_country, subgroup = un_region_sub)) +
  geom_treemap() +
  geom_treemap_subgroup_border(colour = "white", size = 5) +
  # Subgroup labels were tried and disabled (kept for reference):
  # geom_treemap_subgroup_text(place = "top", grow = TRUE,
  #                            alpha = 0.25, colour = "black",
  #                            fontface = "italic") +
  geom_treemap_text(colour = "white", place = "centre",
                    size = 15, grow = FALSE) +  
  scale_fill_manual(name = "Sample Size",
                    values = c("#c8c8c8", "#969696", "#646464", "#323232")) 
  # A continuous fill was tried and disabled in favor of the binned scale:
  # scale_fill_gradient(name = "Sample Size",
  #                     low = "#c8c8c8", 
  #                     high = "#323232") 

tree 

ggsave("figure/treemap.png",
       tree)

Forest Plot of Effects

Knowledge

# Tabulate the number of analysis rows per lab country, including NA codes.
table(final_long$lab_country, useNA = "ifany")
## 
##  AUS  AUT  CAN  CHE  DEU  GBR  GRC  HUN  NOR  NZL  POL  PRT  ROU  RUS  SGP  SVK 
##  723  453  774   84 2316  396  156 1347  228  126  579  243 1113  297  156  315 
##  TUR  TWN  USA 
##  231  267 4674
# For each country, compute Cramer's V (with CI) for knowledge attributions
# in two 2x2 contrasts: Gettier vs Ignorance (GI, table columns 1:2) and
# Gettier vs Knowledge (GK, table columns 1 and 3) — contrast labels match
# the forest-plot axis titles below.
unique_country <- unique(final_long$lab_country)

country_results <- list()
for (i in unique_country){
  
  # subset to one country's rows
  temp_data <- final_long %>% 
    filter(lab_country == i)
  
  # rows: dichotomized knowledge response; columns: condition
  k_table <- table(temp_data$know_vas_combined, temp_data$cond)
  
  # v.chi.sq (MOTE) warns when the effect/df are too small to compute the
  # lower CI bound. The warning handler re-runs the computation, assigns the
  # result into the loop environment via <<-, and forces vlow to 0. The
  # re-run's warning escapes the handler — that is what produces the
  # repeated warnings captured after this chunk.
  # k_table[2:1, ...] flips the row order — presumably to put the "knowledge"
  # category first for prop.test; TODO confirm against the factor coding.
  tryCatch(k_GI <- v.chi.sq(x2 = prop.test(t(k_table[2:1, 1:2]))$statistic, 
         n = sum(t(k_table[2:1, 1:2])),
         r = 2, c = 2), 
         warning = function(w) { 
      k_GI <<- v.chi.sq(x2 = prop.test(t(k_table[2:1, 1:2]))$statistic, 
         n = sum(t(k_table[2:1, 1:2])),
         r = 2, c = 2)
      k_GI$vlow <<- 0
    }
  )
  
  # same pattern for the Gettier vs Knowledge contrast
  tryCatch(  k_GK <- v.chi.sq(x2 = prop.test(t(k_table[2:1, c(1,3)]))$statistic, 
         n = sum(t(k_table[2:1, c(1,3)])),
         r = 2, c = 2), 
         warning = function(w) { 
      k_GK <<- v.chi.sq(x2 = prop.test(t(k_table[2:1, c(1,3)]))$statistic, 
         n = sum(t(k_table[2:1, c(1,3)])),
         r = 2, c = 2)
      k_GK$vlow <<- 0
    }
  )
  
  # store effect sizes, CI bounds, and sample size keyed by country code
  country_results[[i]]$country <- i
  country_results[[i]]$GI <- k_GI$v
  country_results[[i]]$GI_low <- k_GI$vlow
  country_results[[i]]$GI_high <- k_GI$vhigh
  country_results[[i]]$GK <- k_GK$v
  country_results[[i]]$GK_low <- k_GK$vlow
  country_results[[i]]$GK_high <- k_GK$vhigh
  country_results[[i]]$sample <- nrow(temp_data)
  
}
## Warning: The size of the effect combined with the degrees of freedom is too
## small to determine a lower confidence limit for the 'alpha.lower' (or the
## (1/2)(1-'conf.level') symmetric) value specified (set to zero).

## Warning: The size of the effect combined with the degrees of freedom is too
## small to determine a lower confidence limit for the 'alpha.lower' (or the
## (1/2)(1-'conf.level') symmetric) value specified (set to zero).

## Warning: The size of the effect combined with the degrees of freedom is too
## small to determine a lower confidence limit for the 'alpha.lower' (or the
## (1/2)(1-'conf.level') symmetric) value specified (set to zero).

## Warning: The size of the effect combined with the degrees of freedom is too
## small to determine a lower confidence limit for the 'alpha.lower' (or the
## (1/2)(1-'conf.level') symmetric) value specified (set to zero).

## Warning: The size of the effect combined with the degrees of freedom is too
## small to determine a lower confidence limit for the 'alpha.lower' (or the
## (1/2)(1-'conf.level') symmetric) value specified (set to zero).

## Warning: The size of the effect combined with the degrees of freedom is too
## small to determine a lower confidence limit for the 'alpha.lower' (or the
## (1/2)(1-'conf.level') symmetric) value specified (set to zero).

## Warning: The size of the effect combined with the degrees of freedom is too
## small to determine a lower confidence limit for the 'alpha.lower' (or the
## (1/2)(1-'conf.level') symmetric) value specified (set to zero).

## Warning: The size of the effect combined with the degrees of freedom is too
## small to determine a lower confidence limit for the 'alpha.lower' (or the
## (1/2)(1-'conf.level') symmetric) value specified (set to zero).

## Warning: The size of the effect combined with the degrees of freedom is too
## small to determine a lower confidence limit for the 'alpha.lower' (or the
## (1/2)(1-'conf.level') symmetric) value specified (set to zero).

## Warning: The size of the effect combined with the degrees of freedom is too
## small to determine a lower confidence limit for the 'alpha.lower' (or the
## (1/2)(1-'conf.level') symmetric) value specified (set to zero).

## Warning: The size of the effect combined with the degrees of freedom is too
## small to determine a lower confidence limit for the 'alpha.lower' (or the
## (1/2)(1-'conf.level') symmetric) value specified (set to zero).

## Warning: The size of the effect combined with the degrees of freedom is too
## small to determine a lower confidence limit for the 'alpha.lower' (or the
## (1/2)(1-'conf.level') symmetric) value specified (set to zero).

## Warning: The size of the effect combined with the degrees of freedom is too
## small to determine a lower confidence limit for the 'alpha.lower' (or the
## (1/2)(1-'conf.level') symmetric) value specified (set to zero).

## Warning: The size of the effect combined with the degrees of freedom is too
## small to determine a lower confidence limit for the 'alpha.lower' (or the
## (1/2)(1-'conf.level') symmetric) value specified (set to zero).

## Warning: The size of the effect combined with the degrees of freedom is too
## small to determine a lower confidence limit for the 'alpha.lower' (or the
## (1/2)(1-'conf.level') symmetric) value specified (set to zero).

## Warning: The size of the effect combined with the degrees of freedom is too
## small to determine a lower confidence limit for the 'alpha.lower' (or the
## (1/2)(1-'conf.level') symmetric) value specified (set to zero).

## Warning: The size of the effect combined with the degrees of freedom is too
## small to determine a lower confidence limit for the 'alpha.lower' (or the
## (1/2)(1-'conf.level') symmetric) value specified (set to zero).
# Flatten the per-country result lists into one data frame (one row per country).
country_DF <- bind_rows(country_results)

# Forest plot of Cramer's V for the Gettier-Ignorance contrast: one row per
# country, error bars = CI, point size scaled by sample size (legend hidden).
ggplot(country_DF, aes(x = country, y = GI)) +
  geom_point(aes(size = sample)) +
  geom_errorbar(aes(ymin = GI_low, ymax = GI_high)) +
  labs(x = "Geopolitical Region",
       y = "Knowledge Cramer's V for Gettier-Ignorance") +
  coord_flip() +
  theme_classic() +
  theme(legend.position = "none")

# Forest plot of Cramer's V for the Gettier-Knowledge contrast: one row per
# country, error bars = CI, point size scaled by sample size (legend hidden).
ggplot(country_DF, aes(x = country, y = GK)) +
  geom_point(aes(size = sample)) +
  geom_errorbar(aes(ymin = GK_low, ymax = GK_high)) +
  labs(x = "Geopolitical Region",
       y = "Knowledge Cramer's V for Gettier-Knowledge") +
  coord_flip() +
  theme_classic() +
  theme(legend.position = "none")